From 1f2c42fd0be06e7a7ab9cf2c5e70d82988a11856 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 29 May 2015 17:07:04 +0200 Subject: [PATCH 01/16] Mappings: Refactor core index/query time properties into FieldType Mappers are currently used at both index and query time for deciding how to "use" a field. For #8871, we need the index wide view of mappings to have a unified set of settings for each field of a given name within the index. This change moves all the current settings (and methods defining query time behavior) into subclasses of FieldType. In a future PR, this will allow storing the field type at the index level, instead of mappers (which can still have settings that differ per document type). The change is quite large (I'm sorry). I could not see a way to migrate to this in a more piecemeal way. I did leave out cutting over callers of the query methods to using the field type, as that can be done in a follow up. --- .../classic/MapperQueryParser.java | 8 +- .../analyze/TransportAnalyzeAction.java | 4 +- .../TransportGetFieldMappingsIndexAction.java | 16 +- .../cluster/metadata/MappingMetaData.java | 2 +- .../common/geo/builders/ShapeBuilder.java | 2 +- .../index/fielddata/FieldDataType.java | 2 +- .../index/fielddata/IndexFieldData.java | 3 +- .../index/fielddata/IndexFieldDataCache.java | 5 +- .../fielddata/IndexFieldDataService.java | 9 +- .../index/fielddata/ShardFieldData.java | 6 +- .../GlobalOrdinalsIndexFieldData.java | 7 +- .../InternalGlobalOrdinalsIndexFieldData.java | 3 +- .../plain/AbstractIndexFieldData.java | 7 +- .../plain/AbstractIndexGeoPointFieldData.java | 2 +- .../plain/AbstractIndexOrdinalsFieldData.java | 2 +- .../plain/BinaryDVIndexFieldData.java | 2 +- .../plain/BinaryDVNumericIndexFieldData.java | 2 +- .../plain/BytesBinaryDVIndexFieldData.java | 6 +- .../plain/DisabledIndexFieldData.java | 4 +- .../plain/DocValuesIndexFieldData.java | 17 +- .../plain/DoubleArrayIndexFieldData.java | 5 +- .../plain/FSTBytesIndexFieldData.java | 
5 +- .../plain/FloatArrayIndexFieldData.java | 5 +- .../plain/GeoPointBinaryDVIndexFieldData.java | 7 +- .../GeoPointCompressedIndexFieldData.java | 7 +- .../GeoPointDoubleArrayIndexFieldData.java | 5 +- .../fielddata/plain/IndexIndexFieldData.java | 5 +- .../plain/NumericDVIndexFieldData.java | 2 +- .../plain/PackedArrayIndexFieldData.java | 5 +- .../plain/PagedBytesIndexFieldData.java | 5 +- .../plain/ParentChildIndexFieldData.java | 9 +- .../plain/SortedNumericDVIndexFieldData.java | 2 +- .../SortedSetDVOrdinalsIndexFieldData.java | 2 +- .../fieldvisitor/SingleFieldsVisitor.java | 2 +- .../index/mapper/DocumentFieldMappers.java | 6 +- .../index/mapper/FieldMapper.java | 152 +----- .../index/mapper/FieldMappersLookup.java | 22 +- .../index/mapper/MappedFieldType.java | 368 +++++++++++++ .../index/mapper/MapperService.java | 8 +- .../mapper/core/AbstractFieldMapper.java | 373 +++++-------- .../index/mapper/core/BinaryFieldMapper.java | 126 +++-- .../index/mapper/core/BooleanFieldMapper.java | 152 +++--- .../index/mapper/core/ByteFieldMapper.java | 174 ++++--- .../mapper/core/CompletionFieldMapper.java | 79 +-- .../index/mapper/core/DateFieldMapper.java | 491 +++++++++--------- .../index/mapper/core/DoubleFieldMapper.java | 189 +++---- .../index/mapper/core/FloatFieldMapper.java | 189 +++---- .../index/mapper/core/IntegerFieldMapper.java | 167 +++--- .../index/mapper/core/LongFieldMapper.java | 168 +++--- .../index/mapper/core/Murmur3FieldMapper.java | 33 +- .../index/mapper/core/NumberFieldMapper.java | 167 +++--- .../index/mapper/core/ShortFieldMapper.java | 174 ++++--- .../index/mapper/core/StringFieldMapper.java | 120 ++--- .../mapper/core/TokenCountFieldMapper.java | 43 +- .../index/mapper/core/TypeParsers.java | 2 +- .../index/mapper/geo/GeoPointFieldMapper.java | 381 ++++++++------ .../index/mapper/geo/GeoShapeFieldMapper.java | 186 ++++--- .../index/mapper/internal/AllFieldMapper.java | 84 +-- .../internal/FieldNamesFieldMapper.java | 67 ++- 
.../index/mapper/internal/IdFieldMapper.java | 197 +++---- .../mapper/internal/IndexFieldMapper.java | 62 ++- .../mapper/internal/ParentFieldMapper.java | 223 ++++---- .../mapper/internal/RoutingFieldMapper.java | 55 +- .../mapper/internal/SizeFieldMapper.java | 31 +- .../mapper/internal/SourceFieldMapper.java | 71 ++- .../index/mapper/internal/TTLFieldMapper.java | 75 ++- .../mapper/internal/TimestampFieldMapper.java | 122 +++-- .../mapper/internal/TypeFieldMapper.java | 86 +-- .../index/mapper/internal/UidFieldMapper.java | 63 ++- .../mapper/internal/VersionFieldMapper.java | 49 +- .../index/mapper/ip/IpFieldMapper.java | 182 ++++--- .../index/query/CommonTermsQueryParser.java | 4 +- .../index/query/ExistsQueryParser.java | 2 +- .../query/FieldMaskingSpanQueryParser.java | 2 +- .../index/query/GeoShapeQueryParser.java | 4 +- .../index/query/GeohashCellQuery.java | 7 +- .../index/query/MissingQueryParser.java | 2 +- .../index/query/MoreLikeThisQueryParser.java | 2 +- .../index/query/QueryParseContext.java | 8 +- .../index/query/RangeQueryParser.java | 2 +- .../index/query/SimpleQueryStringParser.java | 2 +- .../index/query/SpanTermQueryParser.java | 2 +- .../index/query/TermsQueryParser.java | 2 +- .../index/query/WildcardQueryParser.java | 2 +- .../functionscore/DecayFunctionParser.java | 2 +- .../index/search/MatchQuery.java | 2 +- .../index/search/MultiMatchQuery.java | 2 +- .../geo/IndexedGeoBoundingBoxQuery.java | 12 +- .../index/similarity/SimilarityService.java | 2 +- .../termvectors/ShardTermVectorsService.java | 2 +- .../cache/IndicesFieldDataCache.java | 7 +- .../cache/IndicesFieldDataCacheListener.java | 6 +- .../SingleDocumentPercolatorIndex.java | 4 +- .../elasticsearch/search/SearchService.java | 26 +- .../bucket/children/ChildrenParser.java | 2 +- .../support/AggregationContext.java | 2 +- .../support/format/ValueFormat.java | 2 +- .../support/format/ValueFormatter.java | 2 +- .../support/format/ValueParser.java | 2 +- 
.../search/fetch/FetchPhase.java | 2 +- .../highlight/FastVectorHighlighter.java | 6 +- .../search/highlight/HighlightUtils.java | 6 +- .../search/highlight/PlainHighlighter.java | 6 +- .../search/highlight/PostingsHighlighter.java | 2 +- .../FragmentBuilderHelper.java | 4 +- .../SourceScoreOrderFragmentsBuilder.java | 4 +- .../SourceSimpleFragmentsBuilder.java | 4 +- .../search/lookup/FieldLookup.java | 4 +- .../search/lookup/LeafFieldsLookup.java | 4 +- .../search/sort/SortParseElement.java | 2 +- .../AnalyzingCompletionLookupProvider.java | 28 +- .../suggest/phrase/PhraseSuggestParser.java | 4 +- .../index/analysis/PreBuiltAnalyzerTests.java | 4 +- .../NoOrdinalsStringFieldDataTests.java | 2 +- .../index/mapper/FieldMappersLookupTests.java | 11 +- .../mapper/all/SimpleAllMapperTests.java | 4 +- .../mapper/core/Murmur3FieldMapperTests.java | 2 +- .../mapper/externalvalues/ExternalMapper.java | 24 +- .../mapper/geo/GeoShapeFieldMapperTests.java | 34 +- .../geo/GeohashMappingGeoPointTests.java | 8 +- .../internal/FieldNamesFieldMapperTests.java | 2 +- .../mapper/merge/TestMergeMapperTests.java | 8 +- .../mapper/multifield/MultiFieldTests.java | 20 +- .../MultiFieldsIntegrationTests.java | 1 + .../mapper/simple/SimpleMapperTests.java | 14 +- .../string/SimpleStringMappingTests.java | 17 +- .../timestamp/TimestampMappingTests.java | 28 +- .../mapper/update/UpdateMappingTests.java | 4 +- .../index/similarity/SimilarityTests.java | 24 +- .../warmer/SimpleIndicesWarmerTests.java | 2 +- .../search/child/ParentFieldLoadingTest.java | 18 +- .../child/SimpleChildQuerySearchTests.java | 2 +- .../search/geo/GeoShapeIntegrationTests.java | 4 +- .../AnalyzingCompletionLookupProviderV1.java | 28 +- .../CompletionPostingsFormatTest.java | 35 +- .../test/ElasticsearchIntegrationTest.java | 5 +- 136 files changed, 3247 insertions(+), 2558 deletions(-) create mode 100644 src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java diff --git 
a/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 84fcc62af81..593c8aa80cd 100644 --- a/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -260,7 +260,7 @@ public class MapperQueryParser extends QueryParser { } } if (query == null) { - query = super.getFieldQuery(currentMapper.names().indexName(), queryText, quoted); + query = super.getFieldQuery(currentMapper.fieldType().names().indexName(), queryText, quoted); } return query; } @@ -372,7 +372,7 @@ public class MapperQueryParser extends QueryParser { Query rangeQuery; if (currentMapper instanceof DateFieldMapper && settings.timeZone() != null) { DateFieldMapper dateFieldMapper = (DateFieldMapper) this.currentMapper; - rangeQuery = dateFieldMapper.rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext); + rangeQuery = dateFieldMapper.fieldType().rangeQuery(part1, part2, startInclusive, endInclusive, settings.timeZone(), null, parseContext); } else { rangeQuery = currentMapper.rangeQuery(part1, part2, startInclusive, endInclusive, parseContext); } @@ -508,7 +508,7 @@ public class MapperQueryParser extends QueryParser { query = currentMapper.prefixQuery(termStr, multiTermRewriteMethod, parseContext); } if (query == null) { - query = getPossiblyAnalyzedPrefixQuery(currentMapper.names().indexName(), termStr); + query = getPossiblyAnalyzedPrefixQuery(currentMapper.fieldType().names().indexName(), termStr); } return query; } @@ -644,7 +644,7 @@ public class MapperQueryParser extends QueryParser { if (!forcedAnalyzer) { setAnalyzer(parseContext.getSearchAnalyzer(currentMapper)); } - indexedNameField = currentMapper.names().indexName(); + indexedNameField = currentMapper.fieldType().names().indexName(); return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); 
} return getPossiblyAnalyzedWildcardQuery(indexedNameField, termStr); diff --git a/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index bf18ee1ab8a..0b44c9484ce 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -113,8 +113,8 @@ public class TransportAnalyzeAction extends TransportSingleCustomOperationAction if (fieldMapper.isNumeric()) { throw new IllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields"); } - analyzer = fieldMapper.indexAnalyzer(); - field = fieldMapper.names().indexName(); + analyzer = fieldMapper.fieldType().indexAnalyzer(); + field = fieldMapper.fieldType().names().indexName(); } } diff --git a/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 5ded196f0f3..e9ef1538df2 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -179,7 +179,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO for (String field : request.fields()) { if (Regex.isMatchAllPattern(field)) { for (FieldMapper fieldMapper : allFieldMappers) { - addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); + addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); } } else if (Regex.isSimpleMatchPattern(field)) { // go through the field mappers 3 times, 
to make sure we give preference to the resolve order: full name, index name, name. @@ -187,22 +187,22 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO Collection remainingFieldMappers = Lists.newLinkedList(allFieldMappers); for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.names().fullName())) { - addFieldMapper(fieldMapper.names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().names().fullName())) { + addFieldMapper(fieldMapper.fieldType().names().fullName(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.names().indexName())) { - addFieldMapper(fieldMapper.names().indexName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().names().indexName())) { + addFieldMapper(fieldMapper.fieldType().names().indexName(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } for (Iterator it = remainingFieldMappers.iterator(); it.hasNext(); ) { final FieldMapper fieldMapper = it.next(); - if (Regex.simpleMatch(field, fieldMapper.names().shortName())) { - addFieldMapper(fieldMapper.names().shortName(), fieldMapper, fieldMappings, request.includeDefaults()); + if (Regex.simpleMatch(field, fieldMapper.fieldType().names().shortName())) { + addFieldMapper(fieldMapper.fieldType().names().shortName(), fieldMapper, fieldMappings, request.includeDefaults()); it.remove(); } } @@ -229,7 +229,7 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleCustomO builder.startObject(); fieldMapper.toXContent(builder, includeDefaults ? 
includeDefaultsParams : ToXContent.EMPTY_PARAMS); builder.endObject(); - fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.names().fullName(), builder.bytes())); + fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().names().fullName(), builder.bytes())); } catch (IOException e) { throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); } diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index e6067c46817..4c376465b66 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -289,7 +289,7 @@ public class MappingMetaData extends AbstractDiffable { this.id = new Id(docMapper.idFieldMapper().path()); this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path()); this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), - docMapper.timestampFieldMapper().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), + docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), docMapper.timestampFieldMapper().ignoreMissing()); this.hasParentField = docMapper.parentFieldMapper().active(); } diff --git a/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index 07ceaf1b762..3e733dbd619 100644 --- a/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -728,7 +728,7 @@ public abstract class ShapeBuilder implements ToXContent { Distance radius = null; CoordinateNode node = null; GeometryCollectionBuilder geometryCollections = 
null; - Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.orientation(); + Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.fieldType().orientation(); XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { diff --git a/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java b/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java index f42ba96b762..371b802dc0c 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java +++ b/src/main/java/org/elasticsearch/index/fielddata/FieldDataType.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.fielddata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; /** */ diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index daca8e1bfbc..3070c1e56ad 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexComponent; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -77,7 +78,7 @@ public interface IndexFieldData extends IndexCompone /** * The field name. */ - FieldMapper.Names getFieldNames(); + MappedFieldType.Names getFieldNames(); /** * The field data type. 
diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java index a2b73221d91..76d9c24da29 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java +++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataCache.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.util.Accountable; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; /** * A simple field data cache abstraction on the *index* level. @@ -47,9 +48,9 @@ public interface IndexFieldDataCache { interface Listener { - void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage); + void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage); - void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes); + void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes); } class None implements IndexFieldDataCache { diff --git a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index 847fa59df48..c3fb6309907 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.plain.*; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.BooleanFieldMapper; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; 
import org.elasticsearch.index.mapper.internal.ParentFieldMapper; @@ -46,6 +47,8 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; +import static org.elasticsearch.index.mapper.MappedFieldType.Names; + /** */ public class IndexFieldDataService extends AbstractIndexComponent { @@ -226,12 +229,12 @@ public class IndexFieldDataService extends AbstractIndexComponent { @SuppressWarnings("unchecked") public > IFD getForField(FieldMapper mapper) { - final FieldMapper.Names fieldNames = mapper.names(); - final FieldDataType type = mapper.fieldDataType(); + final Names fieldNames = mapper.fieldType().names(); + final FieldDataType type = mapper.fieldType().fieldDataType(); if (type == null) { throw new IllegalArgumentException("found no fielddata type for field [" + fieldNames.fullName() + "]"); } - final boolean docValues = mapper.hasDocValues(); + final boolean docValues = mapper.fieldType().hasDocValues(); final String key = fieldNames.indexName(); IndexFieldData fieldData = loadedFieldData.get(key); if (fieldData == null) { diff --git a/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java index 70d8bb18534..f5edf6e21de 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/ShardFieldData.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.ShardId; @@ -62,7 +62,7 @@ public class ShardFieldData extends 
AbstractIndexShardComponent implements Index } @Override - public void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { + public void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable ramUsage) { totalMetric.inc(ramUsage.ramBytesUsed()); String keyFieldName = fieldNames.indexName(); CounterMetric total = perFieldTotals.get(keyFieldName); @@ -79,7 +79,7 @@ public class ShardFieldData extends AbstractIndexShardComponent implements Index } @Override - public void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { if (wasEvicted) { evictionsMetric.inc(); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java index bf06cb3433d..3b4db994a8e 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsIndexFieldData.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.MultiValueMode; import java.util.Collection; @@ -41,11 +42,11 @@ import java.util.Collections; */ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexOrdinalsFieldData, Accountable { - private final FieldMapper.Names fieldNames; + private final MappedFieldType.Names fieldNames; private final FieldDataType fieldDataType; private final long 
memorySizeInBytes; - protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) { + protected GlobalOrdinalsIndexFieldData(Index index, Settings settings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, long memorySizeInBytes) { super(index, settings); this.fieldNames = fieldNames; this.fieldDataType = fieldDataType; @@ -68,7 +69,7 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen } @Override - public FieldMapper.Names getFieldNames() { + public MappedFieldType.Names getFieldNames() { return fieldNames; } diff --git a/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java index 69a39465df6..b91d98f4a62 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import java.util.Collection; @@ -38,7 +39,7 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel private final Atomic[] atomicReaders; - InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) { + InternalGlobalOrdinalsIndexFieldData(Index index, Settings settings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, 
OrdinalMap ordinalMap, long memorySizeInBytes) { super(index, settings, fieldNames, fieldDataType, memorySizeInBytes); this.atomicReaders = new Atomic[segmentAfd.length]; for (int i = 0; i < segmentAfd.length; i++) { diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java index c78a10d4786..f5038c2a17c 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexFieldData.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.settings.IndexSettings; import java.io.IOException; @@ -38,11 +39,11 @@ import java.io.IOException; */ public abstract class AbstractIndexFieldData extends AbstractIndexComponent implements IndexFieldData { - private final FieldMapper.Names fieldNames; + private final MappedFieldType.Names fieldNames; protected final FieldDataType fieldDataType; protected final IndexFieldDataCache cache; - public AbstractIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) { + public AbstractIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) { super(index, indexSettings); this.fieldNames = fieldNames; this.fieldDataType = fieldDataType; @@ -50,7 +51,7 @@ public abstract class AbstractIndexFieldData extends } @Override - public FieldMapper.Names getFieldNames() { + public MappedFieldType.Names getFieldNames() { return this.fieldNames; } diff --git 
a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index 1759067f780..b225ba2e6d2 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java index 74e77d6e921..893efa69ceb 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexOrdinalsFieldData.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java index 
f731cd8eb29..2e03b74a41f 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVIndexFieldData.java @@ -25,7 +25,7 @@ import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData { diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java index 4404ae461df..c78da7c6446 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVNumericIndexFieldData.java @@ -39,7 +39,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java index feabfce0092..369682f377c 100644 --- 
a/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVIndexFieldData.java @@ -29,7 +29,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; @@ -67,8 +67,8 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore breaker - final Names fieldNames = mapper.names(); - return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + final Names fieldNames = mapper.fieldType().names(); + return new BytesBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java index c1dfe339131..e0c82e2f7c2 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/DisabledIndexFieldData.java @@ -25,7 +25,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.FieldMapper; -import 
org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.search.MultiValueMode; @@ -42,7 +42,7 @@ public final class DisabledIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore Circuit Breaker - return new DisabledIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache); + return new DisabledIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java index d3bea6283eb..d2343b36bf5 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java @@ -31,7 +31,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.IdFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; @@ -93,8 +94,8 @@ public abstract class DocValuesIndexFieldData { public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, 
CircuitBreakerService breakerService, MapperService mapperService) { // Ignore Circuit Breaker - final FieldMapper.Names fieldNames = mapper.names(); - final Settings fdSettings = mapper.fieldDataType().getSettings(); + final Names fieldNames = mapper.fieldType().names(); + final Settings fdSettings = mapper.fieldType().fieldDataType().getSettings(); final Map filter = fdSettings.getGroups("filter"); if (filter != null && !filter.isEmpty()) { throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldNames.fullName() + "]"); @@ -102,19 +103,19 @@ public abstract class DocValuesIndexFieldData { if (BINARY_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) { assert numericType == null; - return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + return new BinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } else if (NUMERIC_INDEX_FIELD_NAMES.contains(fieldNames.indexName())) { assert !numericType.isFloatingPoint(); - return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + return new NumericDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } else if (numericType != null) { if (Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1)) { - return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldDataType()); + return new SortedNumericDVIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType()); } else { // prior to ES 1.4: multi-valued numerics were boxed inside a byte[] as BINARY - return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldDataType()); + return new BinaryDVNumericIndexFieldData(index, fieldNames, numericType, mapper.fieldType().fieldDataType()); } } else { - return new SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldDataType()); + return new 
SortedSetDVOrdinalsIndexFieldData(index, cache, indexSettings, fieldNames, breakerService, mapper.fieldType().fieldDataType()); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java index 6d4b9dbc1db..6b99ad05771 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/DoubleArrayIndexFieldData.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorS import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -74,11 +75,11 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new DoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new DoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public DoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public DoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); 
this.breakerService = breakerService; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java index 3f460376e5c..1aa45a517c0 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/FSTBytesIndexFieldData.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -48,11 +49,11 @@ public class FSTBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new FSTBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new FSTBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - FSTBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, FieldDataType fieldDataType, + FSTBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache, breakerService); this.breakerService = breakerService; diff --git 
a/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java index 8f7bee30247..b50c742e15e 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/FloatArrayIndexFieldData.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSo import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -73,11 +74,11 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new FloatArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new FloatArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public FloatArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public FloatArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); this.breakerService = breakerService; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java 
b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java index 37a3fccd0eb..e33512a668f 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointBinaryDVIndexFieldData.java @@ -27,7 +27,8 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; @@ -65,8 +66,8 @@ public class GeoPointBinaryDVIndexFieldData extends DocValuesIndexFieldData impl public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { // Ignore breaker - final FieldMapper.Names fieldNames = mapper.names(); - return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldDataType()); + final Names fieldNames = mapper.fieldType().names(); + return new GeoPointBinaryDVIndexFieldData(index, fieldNames, mapper.fieldType().fieldDataType()); } } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java index 1b48b014239..6bea9d873e5 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointCompressedIndexFieldData.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.fielddata.*; 
import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; import org.elasticsearch.index.settings.IndexSettings; @@ -54,7 +55,7 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField @Override public IndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - FieldDataType type = mapper.fieldDataType(); + FieldDataType type = mapper.fieldType().fieldDataType(); final String precisionAsString = type.getSettings().get(PRECISION_KEY); final Distance precision; if (precisionAsString != null) { @@ -62,13 +63,13 @@ public class GeoPointCompressedIndexFieldData extends AbstractIndexGeoPointField } else { precision = DEFAULT_PRECISION_VALUE; } - return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, precision, breakerService); + return new GeoPointCompressedIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, precision, breakerService); } } private final GeoPointFieldMapper.Encoding encoding; - public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public GeoPointCompressedIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, Distance precision, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java 
b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java index 7a0beb06353..ae41404d53a 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDoubleArrayIndexFieldData.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -48,11 +49,11 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel @Override public IndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new GeoPointDoubleArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public GeoPointDoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public GeoPointDoubleArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); this.breakerService = breakerService; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java 
b/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java index 3fb3a82eb6a..1789d2e148b 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/IndexIndexFieldData.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -47,7 +48,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexFieldData build(Index index, Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new IndexIndexFieldData(index, mapper.names()); + return new IndexIndexFieldData(index, mapper.fieldType().names()); } } @@ -101,7 +102,7 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData { private final AtomicOrdinalsFieldData atomicFieldData; - private IndexIndexFieldData(Index index, FieldMapper.Names names) { + private IndexIndexFieldData(Index index, MappedFieldType.Names names) { super(index, Settings.EMPTY, names, new FieldDataType("string"), null, null); atomicFieldData = new IndexAtomicFieldData(index().name()); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java index 710ddba04a4..49e03015cba 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/NumericDVIndexFieldData.java @@ -31,7 +31,7 @@ import 
org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java index 50f6e631e84..71af0e0de60 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/PackedArrayIndexFieldData.java @@ -57,6 +57,7 @@ import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSou import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -86,14 +87,14 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new PackedArrayIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, numericType, breakerService); + return new PackedArrayIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, numericType, breakerService); } } private final NumericType numericType; 
private final CircuitBreakerService breakerService; - public PackedArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, FieldMapper.Names fieldNames, + public PackedArrayIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, NumericType numericType, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache); diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java index 8d91132b698..91487fe3e69 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesIndexFieldData.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.ordinals.Ordinals; import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -49,11 +50,11 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData { @Override public IndexOrdinalsFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new PagedBytesIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, breakerService); + return new PagedBytesIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, breakerService); } } - public PagedBytesIndexFieldData(Index index, @IndexSettings 
Settings indexSettings, FieldMapper.Names fieldNames, + public PagedBytesIndexFieldData(Index index, @IndexSettings Settings indexSettings, MappedFieldType.Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) { super(index, indexSettings, fieldNames, fieldDataType, cache, breakerService); } diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index e748805e329..51460fa8b73 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -61,7 +61,8 @@ import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentTypeListener; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper; @@ -96,7 +97,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData build(Index index, @IndexSettings Settings indexSettings, FieldMapper mapper, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - return new ParentChildIndexFieldData(index, indexSettings, mapper.names(), mapper.fieldDataType(), cache, - mapperService, breakerService); + return new ParentChildIndexFieldData(index, indexSettings, mapper.fieldType().names(), mapper.fieldType().fieldDataType(), cache, + mapperService, breakerService); } } diff --git 
a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java index 18995573ee4..32bd21c3759 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedNumericDVIndexFieldData.java @@ -40,7 +40,7 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import java.io.IOException; diff --git a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index 7ca547f6159..9d29b3b1a8a 100644 --- a/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -28,7 +28,7 @@ import org.elasticsearch.index.fielddata.*; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.indices.breaker.CircuitBreakerService; diff --git 
a/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java b/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index 945dee616ca..74572f21bd8 100644 --- a/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -67,7 +67,7 @@ public class SingleFieldsVisitor extends FieldsVisitor { if (fieldsValues == null) { return; } - List fieldValues = fieldsValues.get(mapper.names().indexName()); + List fieldValues = fieldsValues.get(mapper.fieldType().names().indexName()); if (fieldValues == null) { return; } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index e4f61db2df1..f7166ad769a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -59,19 +59,19 @@ public final class DocumentFieldMappers implements Iterable { FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function>() { @Override public Map.Entry apply(FieldMapper input) { - return Maps.immutableEntry(input.names().indexName(), input.indexAnalyzer()); + return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().indexAnalyzer()); } })); FieldNameAnalyzer searchAnalyzer = this.searchAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function>() { @Override public Map.Entry apply(FieldMapper input) { - return Maps.immutableEntry(input.names().indexName(), input.searchAnalyzer()); + return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchAnalyzer()); } })); FieldNameAnalyzer searchQuoteAnalyzer = this.searchQuoteAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function>() { @Override public Map.Entry 
apply(FieldMapper input) { - return Maps.immutableEntry(input.names().indexName(), input.searchQuoteAnalyzer()); + return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchQuoteAnalyzer()); } })); return new DocumentFieldMappers(fieldMappers, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer); diff --git a/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 172c05c32ba..02f6459b76b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -19,9 +19,6 @@ package org.elasticsearch.index.mapper; -import com.google.common.base.Strings; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.search.MultiTermQuery; @@ -30,10 +27,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.List; @@ -45,146 +40,7 @@ public interface FieldMapper extends Mapper { String DOC_VALUES_FORMAT = "doc_values_format"; - class Names { - - private final String shortName; - - private final String indexName; - - private final String originalIndexName; - - private final String fullName; - - public Names(String name) { - this(name, name, name, name); - } - - public Names(String shortName, String indexName, String originalIndexName, String fullName) { - this.shortName = shortName; - this.indexName = indexName; - this.originalIndexName = 
originalIndexName; - this.fullName = fullName; - } - - /** - * The logical name of the field. - */ - public String shortName() { - return shortName; - } - - /** - * The indexed name of the field. This is the name under which we will - * store it in the index. - */ - public String indexName() { - return indexName; - } - - /** - * The original index name, before any "path" modifications performed on it. - */ - public String originalIndexName() { - return originalIndexName; - } - - /** - * The full name, including dot path. - */ - public String fullName() { - return fullName; - } - - @Override - public boolean equals(Object o) { - if (o == null || getClass() != o.getClass()) return false; - - Names names = (Names) o; - - if (!fullName.equals(names.fullName)) return false; - if (!indexName.equals(names.indexName)) return false; - if (!originalIndexName.equals(names.originalIndexName)) return false; - if (!shortName.equals(names.shortName)) return false; - - return true; - } - - @Override - public int hashCode() { - int result = shortName.hashCode(); - result = 31 * result + indexName.hashCode(); - result = 31 * result + originalIndexName.hashCode(); - result = 31 * result + fullName.hashCode(); - return result; - } - } - - enum Loading { - LAZY { - @Override - public String toString() { - return LAZY_VALUE; - } - }, - EAGER { - @Override - public String toString() { - return EAGER_VALUE; - } - }, - EAGER_GLOBAL_ORDINALS { - @Override - public String toString() { - return EAGER_GLOBAL_ORDINALS_VALUE; - } - }; - - public static final String KEY = "loading"; - public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals"; - public static final String EAGER_VALUE = "eager"; - public static final String LAZY_VALUE = "lazy"; - - public static Loading parse(String loading, Loading defaultValue) { - if (Strings.isNullOrEmpty(loading)) { - return defaultValue; - } else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) { - return 
EAGER_GLOBAL_ORDINALS; - } else if (EAGER_VALUE.equalsIgnoreCase(loading)) { - return EAGER; - } else if (LAZY_VALUE.equalsIgnoreCase(loading)) { - return LAZY; - } else { - throw new MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]"); - } - } - - } - - Names names(); - - FieldType fieldType(); - - float boost(); - - /** - * The analyzer that will be used to index the field. - */ - Analyzer indexAnalyzer(); - - /** - * The analyzer that will be used to search the field. - */ - Analyzer searchAnalyzer(); - - /** - * The analyzer that will be used for quoted search on the field. - */ - Analyzer searchQuoteAnalyzer(); - - /** - * Similarity used for scoring queries on the field - */ - SimilarityProvider similarity(); + MappedFieldType fieldType(); /** * List of fields where this field should be copied to @@ -236,18 +92,12 @@ public interface FieldMapper extends Mapper { @Nullable Query nullValueFilter(); - FieldDataType fieldDataType(); - boolean isNumeric(); boolean isSortable(); boolean supportsNullValue(); - boolean hasDocValues(); - - Loading normsLoading(Loading defaultLoading); - /** * Fields might not be available before indexing, for example _all, token_count,... * When get is called and these fields are requested, this case needs special treatment. 
diff --git a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java index d751c95910e..eda694a939d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java +++ b/src/main/java/org/elasticsearch/index/mapper/FieldMappersLookup.java @@ -53,7 +53,7 @@ class FieldMappersLookup implements Iterable { CopyOnWriteHashMap map = this.mappers; for (FieldMapper mapper : newMappers) { - String key = mapper.names().fullName(); + String key = mapper.fieldType().names().fullName(); FieldMappers mappers = map.get(key); if (mappers == null) { @@ -76,13 +76,13 @@ class FieldMappersLookup implements Iterable { public FieldMappers indexName(String indexName) { FieldMappers fieldMappers = fullName(indexName); if (fieldMappers != null) { - if (fieldMappers.mapper().names().indexName().equals(indexName)) { + if (fieldMappers.mapper().fieldType().names().indexName().equals(indexName)) { return fieldMappers; } } fieldMappers = new FieldMappers(); for (FieldMapper mapper : this) { - if (mapper.names().indexName().equals(indexName)) { + if (mapper.fieldType().names().indexName().equals(indexName)) { fieldMappers = fieldMappers.concat(mapper); } } @@ -117,10 +117,10 @@ class FieldMappersLookup implements Iterable { public Collection simpleMatchToIndexNames(String pattern) { Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { - fields.add(fieldMapper.names().indexName()); - } else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) { - fields.add(fieldMapper.names().indexName()); + if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) { + fields.add(fieldMapper.fieldType().names().indexName()); + } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) { + fields.add(fieldMapper.fieldType().names().indexName()); } } return fields; @@ 
-132,10 +132,10 @@ class FieldMappersLookup implements Iterable { public Collection simpleMatchToFullName(String pattern) { Set fields = Sets.newHashSet(); for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) { - fields.add(fieldMapper.names().fullName()); - } else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) { - fields.add(fieldMapper.names().fullName()); + if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) { + fields.add(fieldMapper.fieldType().names().fullName()); + } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) { + fields.add(fieldMapper.fieldType().names().fullName()); } } return fields; diff --git a/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java new file mode 100644 index 00000000000..644af16991b --- /dev/null +++ b/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -0,0 +1,368 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.mapper; + +import com.google.common.base.Strings; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.fieldstats.FieldStats; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.similarity.SimilarityProvider; + +import java.io.IOException; +import java.util.List; + +/** + * This defines the core properties and functions to operate on a field. + */ +public class MappedFieldType extends FieldType { + + public static class Names { + + private final String shortName; + + private final String indexName; + + private final String originalIndexName; + + private final String fullName; + + public Names(String name) { + this(name, name, name, name); + } + + public Names(String shortName, String indexName, String originalIndexName, String fullName) { + this.shortName = shortName; + this.indexName = indexName; + this.originalIndexName = originalIndexName; + this.fullName = fullName; + } + + /** + * The logical name of the field. + */ + public String shortName() { + return shortName; + } + + /** + * The indexed name of the field. 
This is the name under which we will + * store it in the index. + */ + public String indexName() { + return indexName; + } + + /** + * The original index name, before any "path" modifications performed on it. + */ + public String originalIndexName() { + return originalIndexName; + } + + /** + * The full name, including dot path. + */ + public String fullName() { + return fullName; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + + Names names = (Names) o; + + if (!fullName.equals(names.fullName)) return false; + if (!indexName.equals(names.indexName)) return false; + if (!originalIndexName.equals(names.originalIndexName)) return false; + if (!shortName.equals(names.shortName)) return false; + + return true; + } + + @Override + public int hashCode() { + int result = shortName.hashCode(); + result = 31 * result + indexName.hashCode(); + result = 31 * result + originalIndexName.hashCode(); + result = 31 * result + fullName.hashCode(); + return result; + } + } + + public enum Loading { + LAZY { + @Override + public String toString() { + return LAZY_VALUE; + } + }, + EAGER { + @Override + public String toString() { + return EAGER_VALUE; + } + }, + EAGER_GLOBAL_ORDINALS { + @Override + public String toString() { + return EAGER_GLOBAL_ORDINALS_VALUE; + } + }; + + public static final String KEY = "loading"; + public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals"; + public static final String EAGER_VALUE = "eager"; + public static final String LAZY_VALUE = "lazy"; + + public static Loading parse(String loading, Loading defaultValue) { + if (Strings.isNullOrEmpty(loading)) { + return defaultValue; + } else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) { + return EAGER_GLOBAL_ORDINALS; + } else if (EAGER_VALUE.equalsIgnoreCase(loading)) { + return EAGER; + } else if (LAZY_VALUE.equalsIgnoreCase(loading)) { + return LAZY; + } else { + throw new 
MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]"); + } + } + } + + private Names names; + private float boost; + // TODO: remove this docvalues flag and use docValuesType + private boolean docValues; + private NamedAnalyzer indexAnalyzer; + private NamedAnalyzer searchAnalyzer; + private NamedAnalyzer searchQuoteAnalyzer; + private SimilarityProvider similarity; + private Loading normsLoading; + private FieldDataType fieldDataType; + + protected MappedFieldType(MappedFieldType ref) { + super(ref); + this.names = ref.names(); + this.boost = ref.boost(); + this.docValues = ref.hasDocValues(); + this.indexAnalyzer = ref.indexAnalyzer(); + this.searchAnalyzer = ref.searchAnalyzer(); + this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer(); + this.similarity = ref.similarity(); + this.normsLoading = ref.normsLoading(); + this.fieldDataType = ref.fieldDataType(); + } + + public MappedFieldType() {} + + public MappedFieldType clone() { + return new MappedFieldType(this); + } + + public boolean isNumeric() { + return false; + } + + public boolean isSortable() { + return true; + } + + public Names names() { + return names; + } + + public void setNames(Names names) { + checkIfFrozen(); + this.names = names; + } + + public float boost() { + return boost; + } + + public void setBoost(float boost) { + checkIfFrozen(); + this.boost = boost; + } + + public FieldDataType fieldDataType() { + return fieldDataType; + } + + public void setFieldDataType(FieldDataType fieldDataType) { + checkIfFrozen(); + this.fieldDataType = fieldDataType; + } + + public boolean hasDocValues() { + return docValues; + } + + public void setHasDocValues(boolean hasDocValues) { + checkIfFrozen(); + this.docValues = hasDocValues; + } + + public Loading normsLoading() { + return normsLoading; + } + + public void setNormsLoading(Loading normsLoading) { + checkIfFrozen(); + this.normsLoading = normsLoading; + } + + public NamedAnalyzer indexAnalyzer() { + return indexAnalyzer; + } + + 
public void setIndexAnalyzer(NamedAnalyzer analyzer) { + checkIfFrozen(); + this.indexAnalyzer = analyzer; + } + + public NamedAnalyzer searchAnalyzer() { + return searchAnalyzer; + } + + public void setSearchAnalyzer(NamedAnalyzer analyzer) { + checkIfFrozen(); + this.searchAnalyzer = analyzer; + } + + public NamedAnalyzer searchQuoteAnalyzer() { + return searchQuoteAnalyzer == null ? searchAnalyzer : searchQuoteAnalyzer; + } + + public void setSearchQuoteAnalyzer(NamedAnalyzer analyzer) { + checkIfFrozen(); + this.searchQuoteAnalyzer = analyzer; + } + + public SimilarityProvider similarity() { + return similarity; + } + + public void setSimilarity(SimilarityProvider similarity) { + checkIfFrozen(); + this.similarity = similarity; + } + + /** Returns the actual value of the field. */ + public Object value(Object value) { + return value; + } + + /** Returns the value that will be used as a result for search. Can be only of specific types... */ + public Object valueForSearch(Object value) { + return value; + } + + /** Returns the indexed value used to construct search "values". */ + public BytesRef indexedValueForSearch(Object value) { + return BytesRefs.toBytesRef(value); + } + + /** + * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this + * field in query string. 
+ */ + public boolean useTermQueryWithQueryString() { + return false; + } + + /** Creates a term associated with the field of this mapper for the given value */ + protected Term createTerm(Object value) { + return new Term(names().indexName(), indexedValueForSearch(value)); + } + + public Query termQuery(Object value, @Nullable QueryParseContext context) { + return new TermQuery(createTerm(value)); + } + + public Query termsQuery(List values, @Nullable QueryParseContext context) { + BytesRef[] bytesRefs = new BytesRef[values.size()]; + for (int i = 0; i < bytesRefs.length; i++) { + bytesRefs[i] = indexedValueForSearch(values.get(i)); + } + return new TermsQuery(names.indexName(), bytesRefs); + } + + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return new TermRangeQuery(names().indexName(), + lowerTerm == null ? null : indexedValueForSearch(lowerTerm), + upperTerm == null ? null : indexedValueForSearch(upperTerm), + includeLower, includeUpper); + } + + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions); + } + + public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + PrefixQuery query = new PrefixQuery(createTerm(value)); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } + + public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates); + if (method != null) { + query.setRewriteMethod(method); + } + return query; + } + + /** + * @return a {@link FieldStats} instance that maps to the type of this 
field based on the provided {@link Terms} instance. + */ + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + return new FieldStats.Text( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax() + ); + } + + /** A term query to use when parsing a query string. Can return null. */ + @Nullable + public Query queryStringTermQuery(Term term) { + return null; + } +} diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b63df2d6cc4..3b223007268 100755 --- a/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -690,8 +690,8 @@ public class MapperService extends AbstractIndexComponent { @Override protected Analyzer getWrappedAnalyzer(String fieldName) { FieldMapper mapper = smartNameFieldMapper(fieldName); - if (mapper != null && mapper.searchAnalyzer() != null) { - return mapper.searchAnalyzer(); + if (mapper != null && mapper.fieldType().searchAnalyzer() != null) { + return mapper.fieldType().searchAnalyzer(); } return defaultAnalyzer; } @@ -709,8 +709,8 @@ public class MapperService extends AbstractIndexComponent { @Override protected Analyzer getWrappedAnalyzer(String fieldName) { FieldMapper mapper = smartNameFieldMapper(fieldName); - if (mapper != null && mapper.searchQuoteAnalyzer() != null) { - return mapper.searchQuoteAnalyzer(); + if (mapper != null && mapper.fieldType().searchQuoteAnalyzer() != null) { + return mapper.fieldType().searchQuoteAnalyzer(); } return defaultAnalyzer; } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java index f7217a5b8e0..db0be598d91 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java +++ 
b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java @@ -25,28 +25,19 @@ import com.google.common.base.Function; import com.google.common.base.Objects; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; -import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.RegexpQuery; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -54,6 +45,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -74,14 +66,12 @@ import java.util.List; import java.util.Locale; import java.util.TreeMap; -/** - * - */ +import 
static org.elasticsearch.index.mapper.core.TypeParsers.DOC_VALUES; + public abstract class AbstractFieldMapper implements FieldMapper { public static class Defaults { - public static final FieldType FIELD_TYPE = new FieldType(); - public static final boolean PRE_2X_DOC_VALUES = false; + public static final MappedFieldType FIELD_TYPE = new MappedFieldType(); static { FIELD_TYPE.setTokenized(true); @@ -89,6 +79,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { FIELD_TYPE.setStoreTermVectors(false); FIELD_TYPE.setOmitNorms(false); FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + FIELD_TYPE.setBoost(Defaults.BOOST); FIELD_TYPE.freeze(); } @@ -98,26 +89,21 @@ public abstract class AbstractFieldMapper implements FieldMapper { public abstract static class Builder extends Mapper.Builder { - protected final FieldType fieldType; + protected final MappedFieldType fieldType; private final IndexOptions defaultOptions; protected Boolean docValues; - protected float boost = Defaults.BOOST; protected boolean omitNormsSet = false; protected String indexName; - protected NamedAnalyzer indexAnalyzer; - protected NamedAnalyzer searchAnalyzer; protected Boolean includeInAll; protected boolean indexOptionsSet = false; - protected SimilarityProvider similarity; - protected Loading normsLoading; @Nullable protected Settings fieldDataSettings; protected final MultiFields.Builder multiFieldsBuilder; protected CopyTo copyTo; - protected Builder(String name, FieldType fieldType) { + protected Builder(String name, MappedFieldType fieldType) { super(name); - this.fieldType = fieldType; + this.fieldType = fieldType.clone(); this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable multiFieldsBuilder = new MultiFields.Builder(); } @@ -191,7 +177,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { } public T boost(float boost) { - this.boost = boost; + this.fieldType.setBoost(boost); return 
builder; } @@ -213,12 +199,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { } public T indexAnalyzer(NamedAnalyzer indexAnalyzer) { - this.indexAnalyzer = indexAnalyzer; + this.fieldType.setIndexAnalyzer(indexAnalyzer); return builder; } public T searchAnalyzer(NamedAnalyzer searchAnalyzer) { - this.searchAnalyzer = searchAnalyzer; + this.fieldType.setSearchAnalyzer(searchAnalyzer); return builder; } @@ -228,12 +214,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { } public T similarity(SimilarityProvider similarity) { - this.similarity = similarity; + this.fieldType.setSimilarity(similarity); return builder; } - public T normsLoading(Loading normsLoading) { - this.normsLoading = normsLoading; + public T normsLoading(MappedFieldType.Loading normsLoading) { + this.fieldType.setNormsLoading(normsLoading); return builder; } @@ -257,8 +243,8 @@ public abstract class AbstractFieldMapper implements FieldMapper { return builder; } - protected Names buildNames(BuilderContext context) { - return new Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context)); + protected MappedFieldType.Names buildNames(BuilderContext context) { + return new MappedFieldType.Names(name, buildIndexName(context), buildIndexNameClean(context), buildFullName(context)); } protected String buildIndexName(BuilderContext context) { @@ -279,136 +265,82 @@ public abstract class AbstractFieldMapper implements FieldMapper { protected String buildFullName(BuilderContext context) { return context.path().fullPathAsText(name); } + + protected void setupFieldType(BuilderContext context) { + fieldType.setNames(buildNames(context)); + } } - protected final Names names; - protected float boost; - protected FieldType fieldType; - protected final Boolean docValues; - protected final NamedAnalyzer indexAnalyzer; - protected NamedAnalyzer searchAnalyzer; - protected final SimilarityProvider similarity; - protected Loading normsLoading; + 
protected MappedFieldType fieldType; + protected final boolean hasDefaultDocValues; protected Settings customFieldDataSettings; - protected FieldDataType fieldDataType; protected final MultiFields multiFields; protected CopyTo copyTo; protected final boolean indexCreatedBefore2x; - protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer, - NamedAnalyzer searchAnalyzer, SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings) { - this(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, similarity, - normsLoading, fieldDataSettings, indexSettings, MultiFields.empty(), null); + protected AbstractFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) { + this(fieldType, docValues, fieldDataSettings, indexSettings, MultiFields.empty(), null); } - protected AbstractFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, NamedAnalyzer indexAnalyzer, - NamedAnalyzer searchAnalyzer, SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + protected AbstractFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { assert indexSettings != null; - this.names = names; - this.boost = boost; - this.fieldType = fieldType; - this.fieldType.freeze(); this.indexCreatedBefore2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0); - - boolean indexedNotAnalyzed = this.fieldType.tokenized() == false && this.fieldType.indexOptions() != IndexOptions.NONE; - if (indexAnalyzer == null && indexedNotAnalyzed) { - this.indexAnalyzer = this.searchAnalyzer = Lucene.KEYWORD_ANALYZER; - } else { - this.indexAnalyzer = indexAnalyzer; - this.searchAnalyzer = 
searchAnalyzer; - } - - this.similarity = similarity; - this.normsLoading = normsLoading; - this.customFieldDataSettings = fieldDataSettings; + FieldDataType fieldDataType; if (fieldDataSettings == null) { - this.fieldDataType = defaultFieldDataType(); + fieldDataType = defaultFieldDataType(); } else { // create a new field data type, with the default settings as well as the "new ones" - this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(), - Settings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings) + fieldDataType = new FieldDataType(defaultFieldDataType().getType(), + Settings.builder().put(defaultFieldDataType().getSettings()).put(fieldDataSettings) ); } - - if (docValues != null) { - // explicitly set - this.docValues = docValues; - } else if (fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) { - // convoluted way to enable doc values, should be removed in the future - this.docValues = true; - } else { - this.docValues = null; // use the default + + // TODO: hasDocValues should just be set directly on the field type by callers of this ctor, but + // then we need to eliminate defaultDocValues() (only needed by geo, which needs to be fixed with passing + // doc values setting down to lat/lon) and get rid of specifying doc values in fielddata (which + // complicates whether we can just compare to the default value to know whether to write the setting) + if (docValues == null && fieldDataType != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(fieldDataType.getFormat(indexSettings))) { + docValues = true; } + hasDefaultDocValues = docValues == null; + + this.fieldType = fieldType.clone(); + if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) { + this.fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + this.fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + } + 
this.fieldType.setHasDocValues(docValues == null ? defaultDocValues() : docValues); + this.fieldType.setFieldDataType(fieldDataType); + this.fieldType.freeze(); + this.multiFields = multiFields; this.copyTo = copyTo; } protected boolean defaultDocValues() { if (indexCreatedBefore2x) { - return Defaults.PRE_2X_DOC_VALUES; + return false; } else { return fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE; } } - @Override - public final boolean hasDocValues() { - return docValues == null ? defaultDocValues() : docValues; - } - @Override public String name() { // TODO: cleanup names so Mapper knows about paths, so that it is always clear whether we are using short or full name - return names.shortName(); + return fieldType.names().shortName(); } - @Override - public Names names() { - return this.names; - } - - public abstract FieldType defaultFieldType(); + public abstract MappedFieldType defaultFieldType(); public abstract FieldDataType defaultFieldDataType(); @Override - public final FieldDataType fieldDataType() { - return fieldDataType; - } - - @Override - public FieldType fieldType() { + public MappedFieldType fieldType() { return fieldType; } - @Override - public float boost() { - return this.boost; - } - - @Override - public Analyzer indexAnalyzer() { - return this.indexAnalyzer; - } - - @Override - public Analyzer searchAnalyzer() { - return this.searchAnalyzer; - } - - @Override - public Analyzer searchQuoteAnalyzer() { - return this.searchAnalyzer; - } - - @Override - public SimilarityProvider similarity() { - return similarity; - } - @Override public CopyTo copyTo() { return copyTo; @@ -421,12 +353,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { parseCreateField(context, fields); for (Field field : fields) { if (!customBoost()) { - field.setBoost(boost); + field.setBoost(fieldType.boost()); } context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + 
names.fullName() + "]", e); + throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e); } multiFields.parse(this, context); return null; @@ -452,72 +384,59 @@ public abstract class AbstractFieldMapper implements FieldMapper { } @Override - public Object valueForSearch(Object value) { - return value; + public final Object value(Object value) { + return fieldType().value(value); } + @Override + public final Object valueForSearch(Object value) { + return fieldType().valueForSearch(value); + } + + // TODO: this is not final so ParentFieldMapper can have custom behavior, per type... @Override public BytesRef indexedValueForSearch(Object value) { - return BytesRefs.toBytesRef(value); + return fieldType().indexedValueForSearch(value); } @Override - public Query queryStringTermQuery(Term term) { - return null; + public final Query queryStringTermQuery(Term term) { + return fieldType().queryStringTermQuery(term); } @Override - public boolean useTermQueryWithQueryString() { - return false; + public final boolean useTermQueryWithQueryString() { + return fieldType().useTermQueryWithQueryString(); } @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - return new TermQuery(createTerm(value)); + public final Query termQuery(Object value, @Nullable QueryParseContext context) { + return fieldType().termQuery(value, context); } @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { - BytesRef[] bytesRefs = new BytesRef[values.size()]; - for (int i = 0; i < bytesRefs.length; i++) { - bytesRefs[i] = indexedValueForSearch(values.get(i)); - } - return new TermsQuery(names.indexName(), bytesRefs); + public final Query termsQuery(List values, @Nullable QueryParseContext context) { + return fieldType().termsQuery(values, context); } @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { 
- return new TermRangeQuery(names.indexName(), - lowerTerm == null ? null : indexedValueForSearch(lowerTerm), - upperTerm == null ? null : indexedValueForSearch(upperTerm), - includeLower, includeUpper); + public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return fieldType().rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context); } @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - return new FuzzyQuery(createTerm(value), fuzziness.asDistance(value), prefixLength, maxExpansions, transpositions); + public final Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + return fieldType().fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions); } @Override - public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - PrefixQuery query = new PrefixQuery(createTerm(value)); - if (method != null) { - query.setRewriteMethod(method); - } - return query; + public final Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + return fieldType().prefixQuery(value, method, context); } @Override - public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates); - if (method != null) { - query.setRewriteMethod(method); - } - return query; - } - - protected Term createTerm(Object value) { - return new Term(names.indexName(), indexedValueForSearch(value)); + public final Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable 
QueryParseContext context) { + return fieldType().regexpQuery(value, flags, maxDeterminizedStates, method, context); } @Override @@ -532,7 +451,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { if (mergeWith instanceof AbstractFieldMapper) { mergedType = ((AbstractFieldMapper) mergeWith).contentType(); } - mergeResult.addConflict("mapper [" + names.fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]"); // different types, return return; } @@ -540,86 +459,86 @@ public abstract class AbstractFieldMapper implements FieldMapper { boolean indexed = fieldType.indexOptions() != IndexOptions.NONE; boolean mergeWithIndexed = fieldMergeWith.fieldType().indexOptions() != IndexOptions.NONE; if (indexed != mergeWithIndexed || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different index values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index values"); } if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store values"); } - if (!this.hasDocValues() && fieldMergeWith.hasDocValues()) { + if (!this.fieldType().hasDocValues() && fieldMergeWith.fieldType().hasDocValues()) { // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitely set // when the doc_values field data format is configured - mergeResult.addConflict("mapper [" + names.fullName() + "] has different " + TypeParsers.DOC_VALUES + " values"); + mergeResult.addConflict("mapper [" + 
fieldType.names().fullName() + "] has different " + TypeParsers.DOC_VALUES + " values"); } if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType.omitNorms()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] cannot enable norms (`norms.enabled`)"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] cannot enable norms (`norms.enabled`)"); } if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different tokenize values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tokenize values"); } if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector values"); } if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_offsets values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_offsets values"); } if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_positions values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_positions values"); } if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different store_term_vector_payloads values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_payloads 
values"); } // null and "default"-named index analyzers both mean the default is used - if (this.indexAnalyzer == null || "default".equals(this.indexAnalyzer.name())) { - if (fieldMergeWith.indexAnalyzer != null && !"default".equals(fieldMergeWith.indexAnalyzer.name())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer"); + if (this.fieldType.indexAnalyzer() == null || "default".equals(this.fieldType.indexAnalyzer().name())) { + if (fieldMergeWith.fieldType.indexAnalyzer() != null && "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer"); } - } else if (fieldMergeWith.indexAnalyzer == null || "default".equals(fieldMergeWith.indexAnalyzer.name())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer"); - } else if (!this.indexAnalyzer.name().equals(fieldMergeWith.indexAnalyzer.name())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different analyzer"); + } else if (fieldMergeWith.fieldType.indexAnalyzer() == null || "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name())) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer"); + } else if (this.fieldType.indexAnalyzer().name().equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer"); } - if (!this.names().equals(fieldMergeWith.names())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different index_name"); + if (!this.fieldType().names().equals(fieldMergeWith.fieldType().names())) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index_name"); } - if (this.similarity == null) { - if (fieldMergeWith.similarity() != null) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has 
different similarity"); + if (this.fieldType.similarity() == null) { + if (fieldMergeWith.fieldType.similarity() != null) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity"); } - } else if (fieldMergeWith.similarity() == null) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity"); - } else if (!this.similarity().equals(fieldMergeWith.similarity())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different similarity"); + } else if (fieldMergeWith.fieldType().similarity() == null) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity"); + } else if (!this.fieldType().similarity().equals(fieldMergeWith.fieldType().similarity())) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity"); } multiFields.merge(mergeWith, mergeResult); if (!mergeResult.simulate()) { // apply changeable values - this.fieldType = new FieldType(this.fieldType); + this.fieldType = this.fieldType.clone(); this.fieldType.setOmitNorms(fieldMergeWith.fieldType.omitNorms()); - this.fieldType.freeze(); - this.boost = fieldMergeWith.boost; - this.normsLoading = fieldMergeWith.normsLoading; - this.copyTo = fieldMergeWith.copyTo; - if (fieldMergeWith.searchAnalyzer != null) { - this.searchAnalyzer = fieldMergeWith.searchAnalyzer; + this.fieldType.setBoost(fieldMergeWith.fieldType.boost()); + this.fieldType.setNormsLoading(fieldMergeWith.fieldType.normsLoading()); + if (fieldMergeWith.fieldType.searchAnalyzer() != null) { + this.fieldType.setSearchAnalyzer(fieldMergeWith.fieldType.searchAnalyzer()); } if (fieldMergeWith.customFieldDataSettings != null) { if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) { this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings; - this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(), - 
Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings) - ); + this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(), + Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings) + )); } } + this.fieldType.freeze(); + this.copyTo = fieldMergeWith.copyTo; } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(names.shortName()); + builder.startObject(fieldType.names().shortName()); boolean includeDefaults = params.paramAsBoolean("include_defaults", false); doXContentBody(builder, includeDefaults, params); return builder.endObject(); @@ -628,12 +547,12 @@ public abstract class AbstractFieldMapper implements FieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { builder.field("type", contentType()); - if (indexCreatedBefore2x && (includeDefaults || !names.shortName().equals(names.originalIndexName()))) { - builder.field("index_name", names.originalIndexName()); + if (indexCreatedBefore2x && (includeDefaults || !fieldType.names().shortName().equals(fieldType.names().originalIndexName()))) { + builder.field("index_name", fieldType.names().originalIndexName()); } - if (includeDefaults || boost != 1.0f) { - builder.field("boost", boost); + if (includeDefaults || fieldType.boost() != 1.0f) { + builder.field("boost", fieldType.boost()); } FieldType defaultFieldType = defaultFieldType(); @@ -650,13 +569,13 @@ public abstract class AbstractFieldMapper implements FieldMapper { if (includeDefaults || fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) { builder.field("term_vector", termVectorOptionsToString(fieldType)); } - if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || normsLoading != null) { + if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || 
fieldType.normsLoading() != null) { builder.startObject("norms"); if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms()) { builder.field("enabled", !fieldType.omitNorms()); } - if (normsLoading != null) { - builder.field(Loading.KEY, normsLoading); + if (fieldType.normsLoading() != null) { + builder.field(MappedFieldType.Loading.KEY, fieldType.normsLoading()); } builder.endObject(); } @@ -666,8 +585,8 @@ public abstract class AbstractFieldMapper implements FieldMapper { doXContentAnalyzers(builder, includeDefaults); - if (similarity() != null) { - builder.field("similarity", similarity().name()); + if (fieldType().similarity() != null) { + builder.field("similarity", fieldType().similarity().name()); } else if (includeDefaults) { builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY); } @@ -677,7 +596,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap()); builder.field("fielddata", orderedFielddataSettings); } else if (includeDefaults) { - orderedFielddataSettings.putAll(fieldDataType.getSettings().getAsMap()); + orderedFielddataSettings.putAll(fieldType.fieldDataType().getSettings().getAsMap()); builder.field("fielddata", orderedFielddataSettings); } multiFields.toXContent(builder, params); @@ -688,21 +607,21 @@ public abstract class AbstractFieldMapper implements FieldMapper { } protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException { - if (indexAnalyzer == null) { + if (fieldType.indexAnalyzer() == null) { if (includeDefaults) { builder.field("analyzer", "default"); } - } else if (includeDefaults || indexAnalyzer.name().startsWith("_") == false && indexAnalyzer.name().equals("default") == false) { - builder.field("analyzer", indexAnalyzer.name()); - if (searchAnalyzer.name().equals(indexAnalyzer.name()) == false) { - builder.field("search_analyzer", searchAnalyzer.name()); + } else if 
(includeDefaults || fieldType.indexAnalyzer().name().startsWith("_") == false && fieldType.indexAnalyzer().name().equals("default") == false) { + builder.field("analyzer", fieldType.indexAnalyzer().name()); + if (fieldType.searchAnalyzer().name().equals(fieldType.indexAnalyzer().name()) == false) { + builder.field("search_analyzer", fieldType.searchAnalyzer().name()); } } } protected void doXContentDocValues(XContentBuilder builder, boolean includeDefaults) throws IOException { - if (includeDefaults || docValues != null) { - builder.field(TypeParsers.DOC_VALUES, hasDocValues()); + if (includeDefaults || hasDefaultDocValues == false) { + builder.field(DOC_VALUES, fieldType().hasDocValues()); } } @@ -753,7 +672,6 @@ public abstract class AbstractFieldMapper implements FieldMapper { } } - protected abstract String contentType(); @Override @@ -762,13 +680,13 @@ public abstract class AbstractFieldMapper implements FieldMapper { } @Override - public boolean isNumeric() { - return false; + public final boolean isNumeric() { + return fieldType().isNumeric(); } @Override - public boolean isSortable() { - return true; + public final boolean isSortable() { + return fieldType().isSortable(); } @Override @@ -776,11 +694,6 @@ public abstract class AbstractFieldMapper implements FieldMapper { return true; } - @Override - public Loading normsLoading(Loading defaultLoading) { - return normsLoading == null ? 
defaultLoading : normsLoading; - } - public static class MultiFields { public static MultiFields empty() { @@ -854,7 +767,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { ContentPath.Type origPathType = context.path().pathType(); context.path().pathType(pathType); - context.path().add(mainField.names().shortName()); + context.path().add(mainField.fieldType().names().shortName()); for (ObjectCursor cursor : mappers.values()) { cursor.value.parse(context); } @@ -871,7 +784,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { for (ObjectCursor cursor : mergeWithMultiField.multiFields.mappers.values()) { FieldMapper mergeWithMapper = cursor.value; - Mapper mergeIntoMapper = mappers.get(mergeWithMapper.names().shortName()); + Mapper mergeIntoMapper = mappers.get(mergeWithMapper.fieldType().names().shortName()); if (mergeIntoMapper == null) { // no mapping, simply add it if not simulating if (!mergeResult.simulate()) { @@ -882,7 +795,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { if (newMappersBuilder == null) { newMappersBuilder = ImmutableOpenMap.builder(mappers); } - newMappersBuilder.put(mergeWithMapper.names().shortName(), mergeWithMapper); + newMappersBuilder.put(mergeWithMapper.fieldType().names().shortName(), mergeWithMapper); if (mergeWithMapper instanceof AbstractFieldMapper) { if (newFieldMappers == null) { newFieldMappers = new ArrayList<>(2); @@ -992,9 +905,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { } @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - return new FieldStats.Text( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax() - ); + public final FieldStats stats(Terms terms, int maxDoc) throws IOException { + return fieldType().stats(terms, maxDoc); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java 
b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index 9972ca45cbd..12b053b4b11 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.core; import com.carrotsearch.hppc.ObjectArrayList; - import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DocValuesType; @@ -40,6 +39,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; @@ -63,7 +63,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper { public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new BinaryFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); @@ -74,13 +74,15 @@ public class BinaryFieldMapper extends AbstractFieldMapper { public static class Builder extends AbstractFieldMapper.Builder { public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); builder = this; } @Override public BinaryFieldMapper build(BuilderContext context) { - return new BinaryFieldMapper(buildNames(context), fieldType, docValues, + setupFieldType(context); + ((BinaryFieldType)fieldType).tryUncompressing = context.indexCreatedVersion().before(Version.V_2_0_0); + return new BinaryFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings(), 
multiFieldsBuilder.build(this, context), copyTo); } } @@ -102,13 +104,67 @@ public class BinaryFieldMapper extends AbstractFieldMapper { } } - protected BinaryFieldMapper(Names names, FieldType fieldType, Boolean docValues, + public static class BinaryFieldType extends MappedFieldType { + protected boolean tryUncompressing = false; + + public BinaryFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected BinaryFieldType(BinaryFieldType ref) { + super(ref); + this.tryUncompressing = ref.tryUncompressing; + } + + @Override + public MappedFieldType clone() { + return new BinaryFieldType(this); + } + + @Override + public BytesReference value(Object value) { + if (value == null) { + return null; + } + + BytesReference bytes; + if (value instanceof BytesRef) { + bytes = new BytesArray((BytesRef) value); + } else if (value instanceof BytesReference) { + bytes = (BytesReference) value; + } else if (value instanceof byte[]) { + bytes = new BytesArray((byte[]) value); + } else { + try { + bytes = new BytesArray(Base64.decode(value.toString())); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to convert bytes", e); + } + } + try { + if (tryUncompressing) { // backcompat behavior + return CompressorFactory.uncompressIfNeeded(bytes); + } else { + return bytes; + } + } catch (IOException e) { + throw new ElasticsearchParseException("failed to decompress source", e); + } + } + + @Override + public Object valueForSearch(Object value) { + return value(value); + } + } + + protected BinaryFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, 1.0f, fieldType, docValues, null, null, null, null, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); } @Override - public FieldType defaultFieldType() { + public MappedFieldType 
defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -117,45 +173,9 @@ public class BinaryFieldMapper extends AbstractFieldMapper { return new FieldDataType("binary"); } - @Override - public Object valueForSearch(Object value) { - return value(value); - } - - @Override - public BytesReference value(Object value) { - if (value == null) { - return null; - } - - BytesReference bytes; - if (value instanceof BytesRef) { - bytes = new BytesArray((BytesRef) value); - } else if (value instanceof BytesReference) { - bytes = (BytesReference) value; - } else if (value instanceof byte[]) { - bytes = new BytesArray((byte[]) value); - } else { - try { - bytes = new BytesArray(Base64.decode(value.toString())); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to convert bytes", e); - } - } - try { - if (indexCreatedBefore2x) { - return CompressorFactory.uncompressIfNeeded(bytes); - } else { - return bytes; - } - } catch (IOException e) { - throw new ElasticsearchParseException("failed to decompress source", e); - } - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - if (!fieldType().stored() && !hasDocValues()) { + if (!fieldType().stored() && !fieldType().hasDocValues()) { return; } byte[] value = context.parseExternalValue(byte[].class); @@ -170,14 +190,14 @@ public class BinaryFieldMapper extends AbstractFieldMapper { return; } if (fieldType().stored()) { - fields.add(new Field(names.indexName(), value, fieldType)); + fields.add(new Field(fieldType().names().indexName(), value, fieldType())); } - if (hasDocValues()) { - CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(names().indexName()); + if (fieldType().hasDocValues()) { + CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field == null) { - field = new CustomBinaryDocValuesField(names().indexName(), value); - 
context.doc().addWithKey(names().indexName(), field); + field = new CustomBinaryDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } else { field.add(value); } @@ -192,17 +212,11 @@ public class BinaryFieldMapper extends AbstractFieldMapper { public static class CustomBinaryDocValuesField extends NumberFieldMapper.CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final ObjectArrayList bytesList; private int totalSize = 0; - public CustomBinaryDocValuesField(String name, byte[] bytes) { + public CustomBinaryDocValuesField(String name, byte[] bytes) { super(name); bytesList = new ObjectArrayList<>(); add(bytes); diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java index a9850b466b4..cfbcbc45ef1 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.ConstantScoreQuery; @@ -34,6 +33,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -58,12 +58,14 @@ public class 
BooleanFieldMapper extends AbstractFieldMapper { public static final String CONTENT_TYPE = "boolean"; public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new BooleanFieldType(); static { FIELD_TYPE.setOmitNorms(true); FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); FIELD_TYPE.freeze(); } @@ -80,7 +82,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper { private Boolean nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); this.builder = this; } @@ -99,8 +101,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper { @Override public BooleanFieldMapper build(BuilderContext context) { - return new BooleanFieldMapper(buildNames(context), boost, fieldType, docValues, nullValue, - similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + return new BooleanFieldMapper(fieldType, docValues, nullValue, + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } } @@ -125,17 +128,86 @@ public class BooleanFieldMapper extends AbstractFieldMapper { } } + public static class BooleanFieldType extends MappedFieldType { + + public BooleanFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected BooleanFieldType(BooleanFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new BooleanFieldType(this); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + if (value == null) { + return Values.FALSE; + } + if (value instanceof Boolean) { + return 
((Boolean) value) ? Values.TRUE : Values.FALSE; + } + String sValue; + if (value instanceof BytesRef) { + sValue = ((BytesRef) value).utf8ToString(); + } else { + sValue = value.toString(); + } + if (sValue.length() == 0) { + return Values.FALSE; + } + if (sValue.length() == 1 && sValue.charAt(0) == 'F') { + return Values.FALSE; + } + if (Booleans.parseBoolean(sValue, false)) { + return Values.TRUE; + } + return Values.FALSE; + } + + @Override + public Boolean value(Object value) { + if (value == null) { + return Boolean.FALSE; + } + String sValue = value.toString(); + if (sValue.length() == 0) { + return Boolean.FALSE; + } + if (sValue.length() == 1 && sValue.charAt(0) == 'F') { + return Boolean.FALSE; + } + if (Booleans.parseBoolean(sValue, false)) { + return Boolean.TRUE; + } + return Boolean.FALSE; + } + + @Override + public Object valueForSearch(Object value) { + return value(value); + } + + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + } + private Boolean nullValue; - protected BooleanFieldMapper(Names names, float boost, FieldType fieldType, Boolean docValues, Boolean nullValue, - SimilarityProvider similarity, Loading normsLoading, + protected BooleanFieldMapper(MappedFieldType fieldType, Boolean docValues, Boolean nullValue, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, boost, fieldType, docValues, Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -145,60 +217,6 @@ public class BooleanFieldMapper extends AbstractFieldMapper { return new FieldDataType(CONTENT_TYPE); } - @Override - public boolean useTermQueryWithQueryString() { - 
return true; - } - - @Override - public Boolean value(Object value) { - if (value == null) { - return Boolean.FALSE; - } - String sValue = value.toString(); - if (sValue.length() == 0) { - return Boolean.FALSE; - } - if (sValue.length() == 1 && sValue.charAt(0) == 'F') { - return Boolean.FALSE; - } - if (Booleans.parseBoolean(sValue, false)) { - return Boolean.TRUE; - } - return Boolean.FALSE; - } - - @Override - public Object valueForSearch(Object value) { - return value(value); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - if (value == null) { - return Values.FALSE; - } - if (value instanceof Boolean) { - return ((Boolean) value) ? Values.TRUE : Values.FALSE; - } - String sValue; - if (value instanceof BytesRef) { - sValue = ((BytesRef) value).utf8ToString(); - } else { - sValue = value.toString(); - } - if (sValue.length() == 0) { - return Values.FALSE; - } - if (sValue.length() == 1 && sValue.charAt(0) == 'F') { - return Values.FALSE; - } - if (Booleans.parseBoolean(sValue, false)) { - return Values.TRUE; - } - return Values.FALSE; - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -209,7 +227,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper { @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !hasDocValues()) { + if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) { return; } @@ -228,9 +246,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper { if (value == null) { return; } - fields.add(new Field(names.indexName(), value ? "T" : "F", fieldType)); - if (hasDocValues()) { - fields.add(new SortedNumericDocValuesField(names.indexName(), value ? 1 : 0)); + fields.add(new Field(fieldType.names().indexName(), value ? 
"T" : "F", fieldType)); + if (fieldType().hasDocValues()) { + fields.add(new SortedNumericDocValuesField(fieldType.names().indexName(), value ? 1 : 0)); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index b1ad42a4d5c..1a65d58025e 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -41,13 +40,13 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -66,7 +65,7 @@ public class ByteFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "byte"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new ByteFieldType(); 
static { FIELD_TYPE.freeze(); @@ -80,7 +79,7 @@ public class ByteFieldMapper extends NumberFieldMapper { protected Byte nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_8_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT); builder = this; } @@ -91,14 +90,23 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override public ByteFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - ByteFieldMapper fieldMapper = new ByteFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), - coerce(context), similarity, normsLoading, - fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + ByteFieldMapper fieldMapper = new ByteFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), + coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + String name = precisionStep == Integer.MAX_VALUE ? 
"_byte/max" : ("_byte/" + precisionStep); + return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep)); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,24 +130,81 @@ public class ByteFieldMapper extends NumberFieldMapper { } } + public static class ByteFieldType extends NumberFieldType { + public ByteFieldType() {} + + protected ByteFieldType(ByteFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new ByteFieldType(this); + } + + @Override + public Byte value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).byteValue(); + } + if (value instanceof BytesRef) { + return ((BytesRef) value).bytes[((BytesRef) value).offset]; + } + return Byte.parseByte(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : (int)parseValue(lowerTerm), + upperTerm == null ? 
null : (int)parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + byte iValue = Byte.parseByte(value); + byte iSim = fuzziness.asByte(); + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinInt(terms); + long maxValue = NumericUtils.getMaxInt(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Byte nullValue; private String nullValueAsString; - protected ByteFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - Byte nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, + protected ByteFieldMapper(MappedFieldType fieldType, Boolean docValues, + Byte nullValue, Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, - ignoreMalformed, coerce, new NamedAnalyzer("_byte/" + precisionStep, new NumericIntegerAnalyzer(precisionStep)), - new NamedAnalyzer("_byte/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -148,33 +213,7 @@ public class ByteFieldMapper extends NumberFieldMapper { return new FieldDataType("byte"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - - @Override - public Byte value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).byteValue(); - } - if (value instanceof BytesRef) { - return ((BytesRef) value).bytes[((BytesRef) value).offset]; - } - return Byte.parseByte(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private byte parseValue(Object value) { + private static byte parseValue(Object value) { if (value instanceof Number) { return ((Number) value).byteValue(); } @@ -184,28 +223,6 @@ public class ByteFieldMapper extends NumberFieldMapper { return Byte.parseByte(value.toString()); } - private int parseValueAsInt(Object value) { - return parseValue(value); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - byte iValue = Byte.parseByte(value); - byte iSim = fuzziness.asByte(); - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValueAsInt(lowerTerm), - upperTerm == null ? 
null : parseValueAsInt(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -222,7 +239,7 @@ public class ByteFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { byte value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -244,7 +261,7 @@ public class ByteFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).byteValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Byte.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Byte.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -255,7 +272,7 @@ public class ByteFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -284,7 +301,7 @@ public class ByteFieldMapper extends NumberFieldMapper { } else { value = (byte) parser.shortValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } @@ -293,7 +310,7 @@ public class ByteFieldMapper extends NumberFieldMapper { field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -319,8 +336,8 @@ public class ByteFieldMapper extends NumberFieldMapper { 
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_8_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_8_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -332,22 +349,13 @@ public class ByteFieldMapper extends NumberFieldMapper { } } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomByteNumericField extends CustomNumericField { private final byte number; private final NumberFieldMapper mapper; - public CustomByteNumericField(NumberFieldMapper mapper, byte number, FieldType fieldType) { + public CustomByteNumericField(NumberFieldMapper mapper, byte number, MappedFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index e8d0cf54990..eeb28e24121 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -25,7 +25,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import 
org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; @@ -39,11 +38,12 @@ import org.elasticsearch.common.xcontent.XContentParser.NumberType; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider; @@ -72,7 +72,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { public static final String CONTENT_TYPE = "completion"; public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new CompletionFieldType(); static { FIELD_TYPE.setOmitNorms(true); @@ -114,7 +114,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { private SortedMap contextMapping = ContextMapping.EMPTY_MAPPING; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); builder = this; } @@ -148,7 +148,8 @@ public class CompletionFieldMapper extends AbstractFieldMapper { @Override public CompletionFieldMapper build(Mapper.BuilderContext context) { - return new CompletionFieldMapper(buildNames(context), indexAnalyzer, searchAnalyzer, null, similarity, payloads, + 
setupFieldType(context); + return new CompletionFieldMapper(fieldType, null, payloads, preserveSeparators, preservePositionIncrements, maxInputLength, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, this.contextMapping); } @@ -219,6 +220,35 @@ public class CompletionFieldMapper extends AbstractFieldMapper { } } + public static class CompletionFieldType extends MappedFieldType { + + public CompletionFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected CompletionFieldType(CompletionFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new CompletionFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public boolean isSortable() { + return false; + } + } + private static final BytesRef EMPTY = new BytesRef(); private PostingsFormat postingsFormat; @@ -236,9 +266,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper { */ // Custom postings formats are deprecated but we still accept a postings format here to be able to test backward compatibility // with older postings formats such as Elasticsearch090 - public CompletionFieldMapper(Names names, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, PostingsFormat wrappedPostingsFormat, SimilarityProvider similarity, boolean payloads, + public CompletionFieldMapper(MappedFieldType fieldType, PostingsFormat wrappedPostingsFormat, boolean payloads, boolean preserveSeparators, boolean preservePositionIncrements, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, SortedMap contextMappings) { - super(names, 1.0f, Defaults.FIELD_TYPE, false, indexAnalyzer, searchAnalyzer, similarity, null, null, indexSettings, multiFields, copyTo); + super(fieldType, false, null, indexSettings, multiFields, copyTo); analyzingSuggestLookupProvider = new 
AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads); if (wrappedPostingsFormat == null) { // delayed until postingsFormat() is called @@ -424,7 +454,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { + "] at position " + i + " is a reserved character"); } } - return new SuggestField(names.indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider); + return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider); } public static int correctSubStringLen(String input, int len) { @@ -445,7 +475,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { private final CompletionTokenStream.ToFiniteStrings toFiniteStrings; private final ContextMapping.Context ctx; - public SuggestField(String name, ContextMapping.Context ctx, String value, FieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) { + public SuggestField(String name, ContextMapping.Context ctx, String value, MappedFieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) { super(name, value, type); this.payload = payload; this.toFiniteStrings = toFiniteStrings; @@ -461,12 +491,12 @@ public class CompletionFieldMapper extends AbstractFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(names().shortName()) + builder.startObject(fieldType().names().shortName()) .field(Fields.TYPE, CONTENT_TYPE); - builder.field(Fields.ANALYZER, indexAnalyzer.name()); - if (indexAnalyzer.name().equals(searchAnalyzer.name()) == false) { - builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), searchAnalyzer.name()); + builder.field(Fields.ANALYZER, fieldType.indexAnalyzer().name()); + if (fieldType.indexAnalyzer().name().equals(fieldType.searchAnalyzer().name()) == false) { + 
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType.searchAnalyzer().name()); } builder.field(Fields.PAYLOADS, this.payloads); builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), this.preserveSeparators); @@ -494,18 +524,13 @@ public class CompletionFieldMapper extends AbstractFieldMapper { return CONTENT_TYPE; } - @Override - public boolean isSortable() { - return false; - } - @Override public boolean supportsNullValue() { return false; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -514,14 +539,6 @@ public class CompletionFieldMapper extends AbstractFieldMapper { return null; } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - public boolean isStoringPayloads() { return payloads; } @@ -531,16 +548,16 @@ public class CompletionFieldMapper extends AbstractFieldMapper { super.merge(mergeWith, mergeResult); CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith; if (payloads != fieldMergeWith.payloads) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different payload values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different payload values"); } if (preservePositionIncrements != fieldMergeWith.preservePositionIncrements) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_position_increments' values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_position_increments' values"); } if (preserveSeparators != fieldMergeWith.preserveSeparators) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_separators' values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_separators' values"); } if(!ContextMapping.mappingsAreEqual(getContextMapping(), 
fieldMergeWith.getContextMapping())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'context_mapping' values"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'context_mapping' values"); } if (!mergeResult.simulate()) { this.maxInputLength = fieldMergeWith.maxInputLength; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index e3842fe474a..b33182d8b17 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Terms; @@ -44,8 +43,10 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -53,7 +54,6 @@ import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import org.elasticsearch.search.internal.SearchContext; import 
org.joda.time.DateTimeZone; @@ -75,37 +75,35 @@ public class DateFieldMapper extends NumberFieldMapper { public static class Defaults extends NumberFieldMapper.Defaults { public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime", Locale.ROOT); - - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; + public static final DateFieldType FIELD_TYPE = new DateFieldType(); static { FIELD_TYPE.freeze(); } public static final String NULL_VALUE = null; - - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; } public static class Builder extends NumberFieldMapper.Builder { - protected TimeUnit timeUnit = Defaults.TIME_UNIT; - protected String nullValue = Defaults.NULL_VALUE; - protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; - private Locale locale; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; // do *NOT* rely on the default locale locale = Locale.ROOT; } + DateFieldType fieldType() { + return (DateFieldType)fieldType; + } + public Builder timeUnit(TimeUnit timeUnit) { - this.timeUnit = timeUnit; + fieldType().setTimeUnit(timeUnit); return this; } @@ -115,28 +113,42 @@ public class DateFieldMapper extends NumberFieldMapper { } public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - this.dateTimeFormatter = dateTimeFormatter; + fieldType().setDateTimeFormatter(dateTimeFormatter); return this; } @Override public DateFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - if (!locale.equals(dateTimeFormatter.locale())) { - dateTimeFormatter = new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), 
locale); - } - DateFieldMapper fieldMapper = new DateFieldMapper(buildNames(context), dateTimeFormatter, - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, timeUnit, ignoreMalformed(context), coerce(context), - similarity, normsLoading, fieldDataSettings, context.indexSettings(), - multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + DateFieldMapper fieldMapper = new DateFieldMapper(fieldType, + docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + protected void setupFieldType(BuilderContext context) { + FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; + if (!locale.equals(dateTimeFormatter.locale())) { + fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); + } + super.setupFieldType(context); + } + public Builder locale(Locale locale) { this.locale = locale; return this; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericDateAnalyzer.buildNamedAnalyzer(fieldType().dateTimeFormatter, precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -169,37 +181,222 @@ public class DateFieldMapper extends NumberFieldMapper { } } - protected FormatDateTimeFormatter dateTimeFormatter; + public static class DateFieldType extends NumberFieldType { - private final DateMathParser dateMathParser; + final class LateParsingQuery extends Query { + + final Object lowerTerm; + final Object upperTerm; + final boolean includeLower; + final boolean includeUpper; + final DateTimeZone timeZone; + final DateMathParser forcedDateParser; + + public LateParsingQuery(Object lowerTerm, Object upperTerm, 
boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) { + this.lowerTerm = lowerTerm; + this.upperTerm = upperTerm; + this.includeLower = includeLower; + this.includeUpper = includeUpper; + this.timeZone = timeZone; + this.forcedDateParser = forcedDateParser; + } + + @Override + public Query rewrite(IndexReader reader) throws IOException { + Query query = innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + return query.rewrite(reader); + } + + @Override + public String toString(String s) { + final StringBuilder sb = new StringBuilder(); + return sb.append(names().indexName()).append(':') + .append(includeLower ? '[' : '{') + .append((lowerTerm == null) ? "*" : lowerTerm.toString()) + .append(" TO ") + .append((upperTerm == null) ? "*" : upperTerm.toString()) + .append(includeUpper ? ']' : '}') + .append(ToStringUtils.boost(getBoost())) + .toString(); + } + } + + protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; + protected TimeUnit timeUnit = Defaults.TIME_UNIT; + protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); + + public DateFieldType() {} + + protected DateFieldType(DateFieldType ref) { + super(ref); + this.dateTimeFormatter = ref.dateTimeFormatter; + this.timeUnit = ref.timeUnit; + this.dateMathParser = ref.dateMathParser; + } + + public DateFieldType clone() { + return new DateFieldType(this); + } + + public FormatDateTimeFormatter dateTimeFormatter() { + return dateTimeFormatter; + } + + public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { + checkIfFrozen(); + this.dateTimeFormatter = dateTimeFormatter; + this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); + } + + public TimeUnit timeUnit() { + return timeUnit; + } + + public void setTimeUnit(TimeUnit timeUnit) { + checkIfFrozen(); + this.timeUnit = timeUnit; + this.dateMathParser = new 
DateMathParser(dateTimeFormatter, timeUnit); + } + + protected DateMathParser dateMathParser() { + return dateMathParser; + } + + private long parseValue(Object value) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return dateTimeFormatter().parser().parseMillis(((BytesRef) value).utf8ToString()); + } + return dateTimeFormatter().parser().parseMillis(value.toString()); + } + + protected long parseStringValue(String value) { + try { + return dateTimeFormatter().parser().parseMillis(value); + } catch (RuntimeException e) { + try { + return timeUnit().toMillis(Long.parseLong(value)); + } catch (NumberFormatException e1) { + throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter().format() + "], and timestamp number with locale [" + dateTimeFormatter().locale() + "]", e); + } + } + } + + @Override + public Long value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToLong((BytesRef) value); + } + return parseStringValue(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Object valueForSearch(Object value) { + if (value instanceof String) { + // assume its the string that was indexed, just return it... 
(for example, with get) + return value; + } + Long val = value(value); + if (val == null) { + return null; + } + return dateTimeFormatter().printer().print(val); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = dateMathParser().parse(value, now()); + long iSim; + try { + iSim = fuzziness.asTimeValue().millis(); + } catch (Exception e) { + // not a time format + iSim = fuzziness.asLong(); + } + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinLong(terms); + long maxValue = NumericUtils.getMaxLong(terms); + return new FieldStats.Date( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter() + ); + } + + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, @Nullable QueryParseContext context) { + // If the current search context is null we're parsing percolator query or a index alias filter. 
+ if (SearchContext.current() == null) { + return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + } else { + return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); + } + } + + private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), + upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), + includeLower, includeUpper); + } + + public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { + if (value instanceof Number) { + return ((Number) value).longValue(); + } + DateMathParser dateParser = dateMathParser(); + if (forcedDateParser != null) { + dateParser = forcedDateParser; + } + String strValue; + if (value instanceof BytesRef) { + strValue = ((BytesRef) value).utf8ToString(); + } else { + strValue = value.toString(); + } + return dateParser.parse(strValue, now(), inclusive, zone); + } + } private String nullValue; - protected final TimeUnit timeUnit; - - protected DateFieldMapper(Names names, FormatDateTimeFormatter dateTimeFormatter, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - String nullValue, TimeUnit timeUnit, Explicit ignoreMalformed,Explicit coerce, - SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, 
NumericDateAnalyzer.buildNamedAnalyzer(dateTimeFormatter, precisionStep), - NumericDateAnalyzer.buildNamedAnalyzer(dateTimeFormatter, Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); - this.dateTimeFormatter = dateTimeFormatter; + protected DateFieldMapper(MappedFieldType fieldType, Boolean docValues, String nullValue, Explicit ignoreMalformed,Explicit coerce, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; - this.timeUnit = timeUnit; - this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); - } - - public FormatDateTimeFormatter dateTimeFormatter() { - return dateTimeFormatter; - } - - public DateMathParser dateMathParser() { - return dateMathParser; } @Override - public FieldType defaultFieldType() { + public DateFieldType fieldType() { + return (DateFieldType)fieldType; + } + + @Override + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -208,63 +405,6 @@ public class DateFieldMapper extends NumberFieldMapper { return new FieldDataType("long"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Long value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToLong((BytesRef) value); - } - return parseStringValue(value.toString()); - } - - /** Dates should return as a string. */ - @Override - public Object valueForSearch(Object value) { - if (value instanceof String) { - // assume its the string that was indexed, just return it... 
(for example, with get) - return value; - } - Long val = value(value); - if (val == null) { - return null; - } - return dateTimeFormatter.printer().print(val); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return dateTimeFormatter.parser().parseMillis(((BytesRef) value).utf8ToString()); - } - return dateTimeFormatter.parser().parseMillis(value.toString()); - } - - private String convertToString(Object value) { - if (value instanceof BytesRef) { - return ((BytesRef) value).utf8ToString(); - } - return value.toString(); - } - private static Callable now() { return new Callable() { @Override @@ -277,62 +417,6 @@ public class DateFieldMapper extends NumberFieldMapper { }; } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = dateMathParser.parse(value, now()); - long iSim; - try { - iSim = fuzziness.asTimeValue().millis(); - } catch (Exception e) { - // not a time format - iSim = fuzziness.asLong(); - } - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - public long parseToMilliseconds(Object value) { - return parseToMilliseconds(value, false, null, dateMathParser); - } - - public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - return parseToMilliseconds(convertToString(value), inclusive, zone, forcedDateParser); - } - - public long parseToMilliseconds(String value, boolean 
inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { - DateMathParser dateParser = dateMathParser; - if (forcedDateParser != null) { - dateParser = forcedDateParser; - } - return dateParser.parse(value, now(), inclusive, zone); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context); - } - - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, @Nullable QueryParseContext context) { - // If the current search context is null we're parsing percolator query or a index alias filter. - if (SearchContext.current() == null) { - return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); - } else { - return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); - } - } - - private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) { - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseToMilliseconds(lowerTerm, !includeLower, timeZone, forcedDateParser == null ? dateMathParser : forcedDateParser), - upperTerm == null ? null : parseToMilliseconds(upperTerm, includeUpper, timeZone, forcedDateParser == null ? 
dateMathParser : forcedDateParser), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -351,7 +435,7 @@ public class DateFieldMapper extends NumberFieldMapper { protected void innerParseCreateField(ParseContext context, List fields) throws IOException { String dateAsString = null; Long value = null; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue instanceof Number) { @@ -398,20 +482,20 @@ public class DateFieldMapper extends NumberFieldMapper { if (dateAsString != null) { assert value == null; if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), dateAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), dateAsString, boost); } - value = parseStringValue(dateAsString); + value = fieldType().parseStringValue(dateAsString); } else if (value != null) { - value = timeUnit.toMillis(value); + value = ((DateFieldType)fieldType).timeUnit().toMillis(value); } if (value != null) { if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType); + CustomLongNumericField field = new CustomLongNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -430,7 +514,9 @@ public class DateFieldMapper extends NumberFieldMapper { } if (!mergeResult.simulate()) { this.nullValue = ((DateFieldMapper) mergeWith).nullValue; - this.dateTimeFormatter = ((DateFieldMapper) mergeWith).dateTimeFormatter; + this.fieldType = this.fieldType.clone(); + fieldType().setDateTimeFormatter(((DateFieldMapper) mergeWith).fieldType().dateTimeFormatter()); + this.fieldType.freeze(); } } @@ -438,10 +524,10 @@ public class 
DateFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } - builder.field("format", dateTimeFormatter.format()); + builder.field("format", fieldType().dateTimeFormatter().format()); if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); } @@ -451,77 +537,18 @@ public class DateFieldMapper extends NumberFieldMapper { builder.field("include_in_all", false); } - if (includeDefaults || timeUnit != Defaults.TIME_UNIT) { - builder.field("numeric_resolution", timeUnit.name().toLowerCase(Locale.ROOT)); + if (includeDefaults || fieldType().timeUnit() != Defaults.TIME_UNIT) { + builder.field("numeric_resolution", fieldType().timeUnit().name().toLowerCase(Locale.ROOT)); } // only serialize locale if needed, ROOT is the default, so no need to serialize that case as well... 
- if (dateTimeFormatter.locale() != null && dateTimeFormatter.locale() != Locale.ROOT) { - builder.field("locale", dateTimeFormatter.locale()); + if (fieldType().dateTimeFormatter().locale() != null && fieldType().dateTimeFormatter().locale() != Locale.ROOT) { + builder.field("locale", fieldType().dateTimeFormatter().locale()); } else if (includeDefaults) { - if (dateTimeFormatter.locale() == null) { + if (fieldType().dateTimeFormatter().locale() == null) { builder.field("locale", Locale.ROOT); } else { - builder.field("locale", dateTimeFormatter.locale()); + builder.field("locale", fieldType().dateTimeFormatter().locale()); } } } - - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); - return new FieldStats.Date( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue, dateTimeFormatter - ); - } - - private long parseStringValue(String value) { - try { - return dateTimeFormatter.parser().parseMillis(value); - } catch (RuntimeException e) { - try { - return timeUnit.toMillis(Long.parseLong(value)); - } catch (NumberFormatException e1) { - throw new MapperParsingException("failed to parse date field [" + value + "], tried both date format [" + dateTimeFormatter.format() + "], and timestamp number with locale [" + dateTimeFormatter.locale() + "]", e); - } - } - } - - public final class LateParsingQuery extends Query { - - final Object lowerTerm; - final Object upperTerm; - final boolean includeLower; - final boolean includeUpper; - final DateTimeZone timeZone; - final DateMathParser forcedDateParser; - - public LateParsingQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser forcedDateParser) { - this.lowerTerm = lowerTerm; - this.upperTerm = upperTerm; - this.includeLower = includeLower; - this.includeUpper = includeUpper; 
- this.timeZone = timeZone; - this.forcedDateParser = forcedDateParser; - } - - @Override - public Query rewrite(IndexReader reader) throws IOException { - Query query = innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser); - return query.rewrite(reader); - } - - @Override - public String toString(String s) { - final StringBuilder sb = new StringBuilder(); - return sb.append(names.indexName()).append(':') - .append(includeLower ? '[' : '{') - .append((lowerTerm == null) ? "*" : lowerTerm.toString()) - .append(" TO ") - .append((upperTerm == null) ? "*" : upperTerm.toString()) - .append(includeUpper ? ']' : '}') - .append(ToStringUtils.boost(getBoost())) - .toString(); - } - } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index 48282232d28..a8f4a44e6ab 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -20,12 +20,10 @@ package org.elasticsearch.index.mapper.core; import com.carrotsearch.hppc.DoubleArrayList; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -44,21 +42,23 @@ import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericDoubleAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import 
org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.util.NumericUtils.doubleToSortableLong; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeDoubleValue; import static org.elasticsearch.index.mapper.MapperBuilders.doubleField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -71,7 +71,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "double"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new DoubleFieldType(); static { FIELD_TYPE.freeze(); @@ -85,7 +85,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { protected Double nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; } @@ -96,13 +96,22 @@ public class DoubleFieldMapper extends NumberFieldMapper { @Override public DoubleFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - DoubleFieldMapper fieldMapper = new DoubleFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), - 
similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + DoubleFieldMapper fieldMapper = new DoubleFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericDoubleAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -126,24 +135,82 @@ public class DoubleFieldMapper extends NumberFieldMapper { } } + public static class DoubleFieldType extends NumberFieldType { + + public DoubleFieldType() {} + + protected DoubleFieldType(DoubleFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new DoubleFieldType(this); + } + + @Override + public Double value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).doubleValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToDouble((BytesRef) value); + } + return Double.parseDouble(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? 
null : parseDoubleValue(lowerTerm), + upperTerm == null ? null : parseDoubleValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + double iValue = Double.parseDouble(value); + double iSim = fuzziness.asDouble(); + return NumericRangeQuery.newDoubleRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms)); + double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms)); + return new FieldStats.Double( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } private Double nullValue; private String nullValueAsString; - protected DoubleFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - Double nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericDoubleAnalyzer.buildNamedAnalyzer(precisionStep), NumericDoubleAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + protected DoubleFieldMapper(MappedFieldType fieldType, Boolean docValues, Double nullValue, Explicit ignoreMalformed, Explicit coerce, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = 
nullValue == null ? null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -152,53 +219,8 @@ public class DoubleFieldMapper extends NumberFieldMapper { return new FieldDataType("double"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Double value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).doubleValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToDouble((BytesRef) value); - } - return Double.parseDouble(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - double iValue = Double.parseDouble(value); - double iSim = fuzziness.asDouble(); - return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseDoubleValue(lowerTerm), - upperTerm == null ? 
null : parseDoubleValue(upperTerm), - includeLower, includeUpper); - } - public Query rangeFilter(Double lowerTerm, Double upperTerm, boolean includeLower, boolean includeUpper) { - return NumericRangeQuery.newDoubleRange(names.indexName(), precisionStep, lowerTerm, upperTerm, includeLower, includeUpper); + return NumericRangeQuery.newDoubleRange(fieldType.names().indexName(), fieldType.numericPrecisionStep(), lowerTerm, upperTerm, includeLower, includeUpper); } @Override @@ -217,7 +239,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { double value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -239,7 +261,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).doubleValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Double.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Double.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -250,7 +272,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -279,26 +301,26 @@ public class DoubleFieldMapper extends NumberFieldMapper { } else { value = parser.doubleValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + 
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomDoubleNumericField field = new CustomDoubleNumericField(this, value, fieldType); + CustomDoubleNumericField field = new CustomDoubleNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { if (useSortedNumericDocValues) { - addDocValue(context, fields, NumericUtils.doubleToSortableLong(value)); + addDocValue(context, fields, doubleToSortableLong(value)); } else { - CustomDoubleNumericDocValuesField field = (CustomDoubleNumericDocValuesField) context.doc().getByKey(names().indexName()); + CustomDoubleNumericDocValuesField field = (CustomDoubleNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field != null) { field.add(value); } else { - field = new CustomDoubleNumericDocValuesField(names().indexName(), value); - context.doc().addWithKey(names().indexName(), field); + field = new CustomDoubleNumericDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } } } @@ -325,8 +347,8 @@ public class DoubleFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -339,22 +361,13 @@ public class DoubleFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms 
terms, int maxDoc) throws IOException { - double minValue = NumericUtils.sortableLongToDouble(NumericUtils.getMinLong(terms)); - double maxValue = NumericUtils.sortableLongToDouble(NumericUtils.getMaxLong(terms)); - return new FieldStats.Double( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomDoubleNumericField extends CustomNumericField { private final double number; private final NumberFieldMapper mapper; - public CustomDoubleNumericField(NumberFieldMapper mapper, double number, FieldType fieldType) { + public CustomDoubleNumericField(NumberFieldMapper mapper, double number, NumberFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; @@ -376,12 +389,6 @@ public class DoubleFieldMapper extends NumberFieldMapper { public static class CustomDoubleNumericDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final DoubleArrayList values; public CustomDoubleNumericDocValuesField(String name, double value) { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index 4a3f6c2e3fc..50c5f578a48 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -20,11 +20,9 @@ package org.elasticsearch.index.mapper.core; import com.carrotsearch.hppc.FloatArrayList; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; @@ -45,21 +43,23 @@ import 
org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericFloatAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.util.NumericUtils.floatToSortableInt; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; import static org.elasticsearch.index.mapper.MapperBuilders.floatField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -72,7 +72,7 @@ public class FloatFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "float"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new FloatFieldType(); static { FIELD_TYPE.freeze(); @@ -86,7 +86,7 @@ public class FloatFieldMapper extends NumberFieldMapper { protected Float nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -97,13 +97,22 @@ public class 
FloatFieldMapper extends NumberFieldMapper { @Override public FloatFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - FloatFieldMapper fieldMapper = new FloatFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), - similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + FloatFieldMapper fieldMapper = new FloatFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -127,23 +136,83 @@ public class FloatFieldMapper extends NumberFieldMapper { } } + public static class FloatFieldType extends NumberFieldType { + + public FloatFieldType() {} + + protected FloatFieldType(FloatFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new FloatFieldType(this); + } + + @Override + public Float value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).floatValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToFloat((BytesRef) value); + } + return Float.parseFloat(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + int intValue = NumericUtils.floatToSortableInt(parseValue(value)); + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact 
match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseValue(lowerTerm), + upperTerm == null ? null : parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + float iValue = Float.parseFloat(value); + final float iSim = fuzziness.asFloat(); + return NumericRangeQuery.newFloatRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms)); + float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms)); + return new FieldStats.Float( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Float nullValue; private String nullValueAsString; - protected FloatFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected FloatFieldMapper(MappedFieldType fieldType, Boolean docValues, Float nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep), NumericFloatAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + @Nullable Settings fieldDataSettings, Settings indexSettings, 
MultiFields multiFields, CopyTo copyTo) { + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -152,34 +221,7 @@ public class FloatFieldMapper extends NumberFieldMapper { return new FieldDataType("float"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - - @Override - public Float value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).floatValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToFloat((BytesRef) value); - } - return Float.parseFloat(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - int intValue = NumericUtils.floatToSortableInt(parseValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private float parseValue(Object value) { + private static float parseValue(Object value) { if (value instanceof Number) { return ((Number) value).floatValue(); } @@ -189,24 +231,6 @@ public class FloatFieldMapper extends NumberFieldMapper { return Float.parseFloat(value.toString()); } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - float iValue = Float.parseFloat(value); - final float iSim = fuzziness.asFloat(); - return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return 
NumericRangeQuery.newFloatRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? null : parseValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -223,7 +247,7 @@ public class FloatFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { float value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -245,7 +269,7 @@ public class FloatFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).floatValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Float.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Float.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -256,7 +280,7 @@ public class FloatFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -285,26 +309,26 @@ public class FloatFieldMapper extends NumberFieldMapper { } else { value = parser.floatValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomFloatNumericField field = new CustomFloatNumericField(this, value, fieldType); + 
CustomFloatNumericField field = new CustomFloatNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { if (useSortedNumericDocValues) { - addDocValue(context, fields, NumericUtils.floatToSortableInt(value)); + addDocValue(context, fields, floatToSortableInt(value)); } else { - CustomFloatNumericDocValuesField field = (CustomFloatNumericDocValuesField) context.doc().getByKey(names().indexName()); + CustomFloatNumericDocValuesField field = (CustomFloatNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field != null) { field.add(value); } else { - field = new CustomFloatNumericDocValuesField(names().indexName(), value); - context.doc().addWithKey(names().indexName(), field); + field = new CustomFloatNumericDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } } } @@ -332,8 +356,8 @@ public class FloatFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -346,22 +370,13 @@ public class FloatFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - float minValue = NumericUtils.sortableIntToFloat(NumericUtils.getMinInt(terms)); - float maxValue = NumericUtils.sortableIntToFloat(NumericUtils.getMaxInt(terms)); - return new FieldStats.Float( - maxDoc, terms.getDocCount(), 
terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomFloatNumericField extends CustomNumericField { private final float number; private final NumberFieldMapper mapper; - public CustomFloatNumericField(NumberFieldMapper mapper, float number, FieldType fieldType) { + public CustomFloatNumericField(NumberFieldMapper mapper, float number, NumberFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; @@ -383,12 +398,6 @@ public class FloatFieldMapper extends NumberFieldMapper { public static class CustomFloatNumericDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final FloatArrayList values; public CustomFloatNumericDocValuesField(String name, float value) { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 5f05e72b4d3..893722d5a0a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -40,15 +39,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import 
org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -67,7 +67,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "integer"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new IntegerFieldType(); static { FIELD_TYPE.freeze(); @@ -81,7 +81,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { protected Integer nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -92,13 +92,23 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override public IntegerFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - IntegerFieldMapper fieldMapper = new IntegerFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, - nullValue, ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings, + setupFieldType(context); + IntegerFieldMapper fieldMapper = new IntegerFieldMapper(fieldType, docValues, + nullValue, ignoreMalformed(context), coerce(context), fieldDataSettings, 
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,23 +132,83 @@ public class IntegerFieldMapper extends NumberFieldMapper { } } + public static class IntegerFieldType extends NumberFieldType { + + public IntegerFieldType() {} + + protected IntegerFieldType(IntegerFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new IntegerFieldType(this); + } + + @Override + public Integer value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).intValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToInt((BytesRef) value); + } + return Integer.parseInt(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseValue(lowerTerm), + upperTerm == null ? 
null : parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + int iValue = Integer.parseInt(value); + int iSim = fuzziness.asInt(); + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinInt(terms); + long maxValue = NumericUtils.getMaxInt(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Integer nullValue; private String nullValueAsString; - protected IntegerFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected IntegerFieldMapper(MappedFieldType fieldType, Boolean docValues, Integer nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep), NumericIntegerAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -147,33 +217,9 @@ public class IntegerFieldMapper extends NumberFieldMapper { return new FieldDataType("int"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - @Override - public Integer value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).intValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToInt((BytesRef) value); - } - return Integer.parseInt(value.toString()); - } - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private int parseValue(Object value) { + private static int parseValue(Object value) { if (value instanceof Number) { return ((Number) value).intValue(); } @@ -183,24 +229,6 @@ public class IntegerFieldMapper extends NumberFieldMapper { return Integer.parseInt(value.toString()); } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - int iValue = Integer.parseInt(value); - int iSim = fuzziness.asInt(); - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? 
null : parseValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -217,7 +245,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { int value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -239,7 +267,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).intValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Integer.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Integer.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -250,7 +278,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -279,7 +307,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { } else { value = parser.intValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } @@ -288,11 +316,11 @@ public class IntegerFieldMapper extends NumberFieldMapper { protected void addIntegerFields(ParseContext context, List fields, int value, float boost) { if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomIntegerNumericField field = new 
CustomIntegerNumericField(this, value, fieldType); + CustomIntegerNumericField field = new CustomIntegerNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -322,8 +350,8 @@ public class IntegerFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -336,22 +364,13 @@ public class IntegerFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomIntegerNumericField extends CustomNumericField { private final int number; private final NumberFieldMapper mapper; - public CustomIntegerNumericField(NumberFieldMapper mapper, int number, FieldType fieldType) { + public CustomIntegerNumericField(NumberFieldMapper mapper, int number, MappedFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java index cd9b9ca5df1..ccf20f976f1 100644 --- 
a/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/LongFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -40,15 +39,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericLongAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -67,7 +67,7 @@ public class LongFieldMapper extends NumberFieldMapper { public static final String CONTENT_TYPE = "long"; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new LongFieldType(); static { FIELD_TYPE.freeze(); @@ -81,7 +81,7 @@ public class LongFieldMapper extends NumberFieldMapper { protected Long nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new 
FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; } @@ -92,13 +92,22 @@ public class LongFieldMapper extends NumberFieldMapper { @Override public LongFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - LongFieldMapper fieldMapper = new LongFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, - ignoreMalformed(context), coerce(context), similarity, normsLoading, - fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + LongFieldMapper fieldMapper = new LongFieldMapper(fieldType, docValues, nullValue, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,23 +131,83 @@ public class LongFieldMapper extends NumberFieldMapper { } } + public static class LongFieldType extends NumberFieldType { + + public LongFieldType() {} + + protected LongFieldType(LongFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new LongFieldType(this); + } + + @Override + public Long value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToLong((BytesRef) value); + } + return Long.parseLong(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new 
BytesRefBuilder(); + NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseLongValue(lowerTerm), + upperTerm == null ? null : parseLongValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = Long.parseLong(value); + final long iSim = fuzziness.asLong(); + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinLong(terms); + long maxValue = NumericUtils.getMaxLong(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Long nullValue; private String nullValueAsString; - protected LongFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected LongFieldMapper(MappedFieldType fieldType, Boolean docValues, Long nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, - NumericLongAnalyzer.buildNamedAnalyzer(precisionStep), NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE), - similarity, normsLoading, fieldDataSettings, indexSettings, 
multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -147,50 +216,6 @@ public class LongFieldMapper extends NumberFieldMapper { return new FieldDataType("long"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Long value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToLong((BytesRef) value); - } - return Long.parseLong(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = Long.parseLong(value); - final long iSim = fuzziness.asLong(); - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseLongValue(lowerTerm), - upperTerm == null ? 
null : parseLongValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -207,7 +232,7 @@ public class LongFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { long value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -229,7 +254,7 @@ public class LongFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).longValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Long.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Long.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -240,7 +265,7 @@ public class LongFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -269,16 +294,16 @@ public class LongFieldMapper extends NumberFieldMapper { } else { value = parser.longValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType); + CustomLongNumericField field = new CustomLongNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); fields.add(field); } - 
if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -304,8 +329,8 @@ public class LongFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -317,22 +342,13 @@ public class LongFieldMapper extends NumberFieldMapper { } } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinLong(terms); - long maxValue = NumericUtils.getMaxLong(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomLongNumericField extends CustomNumericField { private final long number; private final NumberFieldMapper mapper; - public CustomLongNumericField(NumberFieldMapper mapper, long number, FieldType fieldType) { + public CustomLongNumericField(NumberFieldMapper mapper, long number, MappedFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java index 7c9c920a3c6..9401081a79f 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapper.java @@ -20,13 +20,16 @@ package org.elasticsearch.index.mapper.core; import 
org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericDateAnalyzer; +import org.elasticsearch.index.analysis.NumericLongAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; @@ -36,7 +39,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.murmur3Field; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -50,20 +52,30 @@ public class Murmur3FieldMapper extends LongFieldMapper { public static class Builder extends NumberFieldMapper.Builder { public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Integer.MAX_VALUE); + super(name, Defaults.FIELD_TYPE, Integer.MAX_VALUE); builder = this; builder.precisionStep(Integer.MAX_VALUE); } @Override public Murmur3FieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, null, - ignoreMalformed(context), coerce(context), similarity, normsLoading, + setupFieldType(context); + Murmur3FieldMapper fieldMapper = new Murmur3FieldMapper(fieldType, docValues, null, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), 
multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -92,13 +104,12 @@ public class Murmur3FieldMapper extends LongFieldMapper { } } - protected Murmur3FieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected Murmur3FieldMapper(MappedFieldType fieldType, Boolean docValues, Long nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, nullValue, ignoreMalformed, coerce, - similarity, normsLoading, fieldDataSettings, - indexSettings, multiFields, copyTo); + super(fieldType, docValues, nullValue, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java index ecb2e742e79..9d2dddb3db0 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/NumberFieldMapper.java @@ -31,9 +31,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; -import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.ByteArrayDataOutput; import 
org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; @@ -45,14 +43,13 @@ import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; -import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.io.Reader; @@ -70,16 +67,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A public static final int PRECISION_STEP_32_BIT = 8; // 4tpv public static final int PRECISION_STEP_64_BIT = 16; // 4tpv - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); - - static { - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setStoreTermVectors(false); - FIELD_TYPE.freeze(); - } - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); public static final Explicit COERCE = new Explicit<>(true, false); } @@ -90,9 +77,9 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A private Boolean coerce; - public Builder(String name, FieldType fieldType, int defaultPrecisionStep) { + public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { super(name, fieldType); - fieldType.setNumericPrecisionStep(defaultPrecisionStep); + this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); } public T precisionStep(int 
precisionStep) { @@ -129,10 +116,60 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } return Defaults.COERCE; } - + + protected void setupFieldType(BuilderContext context) { + super.setupFieldType(context); + fieldType.setOmitNorms(fieldType.omitNorms() && fieldType.boost() == 1.0f); + int precisionStep = fieldType.numericPrecisionStep(); + if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { + fieldType.setNumericPrecisionStep(Integer.MAX_VALUE); + } + fieldType.setIndexAnalyzer(makeNumberAnalyzer(fieldType.numericPrecisionStep())); + fieldType.setSearchAnalyzer(makeNumberAnalyzer(Integer.MAX_VALUE)); + } + + protected abstract NamedAnalyzer makeNumberAnalyzer(int precisionStep); + + protected abstract int maxPrecisionStep(); } - protected int precisionStep; + public static abstract class NumberFieldType extends MappedFieldType { + + public NumberFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + setTokenized(false); + setOmitNorms(true); + setIndexOptions(IndexOptions.DOCS); + setStoreTermVectors(false); + } + + protected NumberFieldType(NumberFieldType ref) { + super(ref); + } + + public abstract NumberFieldType clone(); + + @Override + public abstract Object value(Object value); + + @Override + public Object valueForSearch(Object value) { + return value(value); + } + + @Override + public abstract Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions); + + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + + @Override + public boolean isNumeric() { + return true; + } + } protected Boolean includeInAll; @@ -151,7 +188,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A private ThreadLocal tokenStream = new ThreadLocal() { @Override protected NumericTokenStream initialValue() { - return new NumericTokenStream(precisionStep); + return new 
NumericTokenStream(fieldType.numericPrecisionStep()); } }; @@ -183,23 +220,14 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } }; - protected NumberFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, - Explicit ignoreMalformed, Explicit coerce, NamedAnalyzer indexAnalyzer, - NamedAnalyzer searchAnalyzer, SimilarityProvider similarity, - Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, + protected NumberFieldMapper(MappedFieldType fieldType, Boolean docValues, + Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { // LUCENE 4 UPGRADE: Since we can't do anything before the super call, we have to push the boost check down to subclasses - super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); - if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { - this.precisionStep = Integer.MAX_VALUE; - } else { - this.precisionStep = precisionStep; - } + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); this.ignoreMalformed = ignoreMalformed; this.coerce = coerce; - Version v = Version.indexCreated(indexSettings); - this.useSortedNumericDocValues = v.onOrAfter(Version.V_1_4_0_Beta1); + this.useSortedNumericDocValues = Version.indexCreated(indexSettings).onOrAfter(Version.V_1_4_0_Beta1); } @Override @@ -221,12 +249,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A includeInAll = null; } - protected abstract int maxPrecisionStep(); - - public int precisionStep() { - return this.precisionStep; - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { RuntimeException e = null; @@ -247,41 +269,22 @@ public abstract class NumberFieldMapper extends 
AbstractFieldMapper implements A protected final void addDocValue(ParseContext context, List fields, long value) { if (useSortedNumericDocValues) { - fields.add(new SortedNumericDocValuesField(names().indexName(), value)); + fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value)); } else { - CustomLongNumericDocValuesField field = (CustomLongNumericDocValuesField) context.doc().getByKey(names().indexName()); + CustomLongNumericDocValuesField field = (CustomLongNumericDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field != null) { field.add(value); } else { - field = new CustomLongNumericDocValuesField(names().indexName(), value); - context.doc().addWithKey(names().indexName(), field); + field = new CustomLongNumericDocValuesField(fieldType().names().indexName(), value); + context.doc().addWithKey(fieldType().names().indexName(), field); } } } - /** - * Use the field query created here when matching on numbers. - */ - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - - @Override - public final Query termQuery(Object value, @Nullable QueryParseContext context) { - return new TermQuery(new Term(names.indexName(), indexedValueForSearch(value))); - } - - @Override - public abstract Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context); - - @Override - public abstract Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions); - /** * Converts an object value into a double */ - public double parseDoubleValue(Object value) { + public static double parseDoubleValue(Object value) { if (value instanceof Number) { return ((Number) value).doubleValue(); } @@ -296,7 +299,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A /** * Converts an object value into a long */ - public long parseLongValue(Object value) { + public static 
long parseLongValue(Object value) { if (value instanceof Number) { return ((Number) value).longValue(); } @@ -308,16 +311,6 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A return Long.parseLong(value.toString()); } - /** - * Override the default behavior (to return the string, and return the actual Number instance). - * - * @param value - */ - @Override - public Object valueForSearch(Object value) { - return value(value); - } - @Override public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { super.merge(mergeWith, mergeResult); @@ -326,7 +319,9 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } if (!mergeResult.simulate()) { NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith; - this.precisionStep = nfmMergeWith.precisionStep; + this.fieldType = this.fieldType.clone(); + this.fieldType.setNumericPrecisionStep(nfmMergeWith.fieldType.numericPrecisionStep()); + this.fieldType.freeze(); this.includeInAll = nfmMergeWith.includeInAll; if (nfmMergeWith.ignoreMalformed.explicit()) { this.ignoreMalformed = nfmMergeWith.ignoreMalformed; @@ -342,13 +337,13 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } protected NumericTokenStream popCachedStream() { - if (precisionStep == 4) { + if (fieldType.numericPrecisionStep() == 4) { return tokenStream4.get(); - } else if (precisionStep == 8) { + } else if (fieldType.numericPrecisionStep() == 8) { return tokenStream8.get(); - } else if (precisionStep == 16) { + } else if (fieldType.numericPrecisionStep() == 16) { return tokenStream16.get(); - } else if (precisionStep == Integer.MAX_VALUE) { + } else if (fieldType.numericPrecisionStep() == Integer.MAX_VALUE) { return tokenStreamMax.get(); } return tokenStream.get(); @@ -359,8 +354,8 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A protected final NumberFieldMapper mapper; - public 
CustomNumericField(NumberFieldMapper mapper, Number value, FieldType fieldType) { - super(mapper.names().indexName(), fieldType); + public CustomNumericField(NumberFieldMapper mapper, Number value, MappedFieldType fieldType) { + super(mapper.fieldType().names().indexName(), fieldType); this.mapper = mapper; if (value != null) { this.fieldsData = value; @@ -431,13 +426,8 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A } - public static class CustomLongNumericDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } + public static class CustomLongNumericDocValuesField extends CustomNumericDocValuesField { private final LongArrayList values; @@ -481,9 +471,4 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A builder.field("coerce", coerce.value()); } } - - @Override - public boolean isNumeric() { - return true; - } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index f195862ac74..382f30eab40 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Terms; import org.apache.lucene.search.ConstantScoreQuery; @@ -43,13 +42,13 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import 
org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -69,7 +68,7 @@ public class ShortFieldMapper extends NumberFieldMapper { public static final int DEFAULT_PRECISION_STEP = 8; public static class Defaults extends NumberFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new ShortFieldType(); static { FIELD_TYPE.freeze(); @@ -83,7 +82,7 @@ public class ShortFieldMapper extends NumberFieldMapper { protected Short nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), DEFAULT_PRECISION_STEP); + super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP); builder = this; } @@ -94,13 +93,24 @@ public class ShortFieldMapper extends NumberFieldMapper { @Override public ShortFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - ShortFieldMapper fieldMapper = new ShortFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, - ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings, + setupFieldType(context); + ShortFieldMapper fieldMapper = new ShortFieldMapper(fieldType, docValues, nullValue, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); 
return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + String name = precisionStep == Integer.MAX_VALUE ? "_short/max" : ("_short/" + precisionStep); + return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep)); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -124,23 +134,84 @@ public class ShortFieldMapper extends NumberFieldMapper { } } + public static class ShortFieldType extends NumberFieldType { + + public ShortFieldType() {} + + protected ShortFieldType(ShortFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new ShortFieldType(this); + } + + @Override + public Short value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).shortValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToShort((BytesRef) value); + } + return Short.parseShort(value.toString()); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : (int)parseValue(lowerTerm), + upperTerm == null ? 
null : (int)parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + short iValue = Short.parseShort(value); + short iSim = fuzziness.asShort(); + return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + + @Override + public FieldStats stats(Terms terms, int maxDoc) throws IOException { + long minValue = NumericUtils.getMinInt(terms); + long maxValue = NumericUtils.getMaxInt(terms); + return new FieldStats.Long( + maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue + ); + } + } + private Short nullValue; private String nullValueAsString; - protected ShortFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected ShortFieldMapper(MappedFieldType fieldType, Boolean docValues, Short nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, new NamedAnalyzer("_short/" + precisionStep, - new NumericIntegerAnalyzer(precisionStep)), new NamedAnalyzer("_short/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)), - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
null : nullValue.toString(); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -149,33 +220,7 @@ public class ShortFieldMapper extends NumberFieldMapper { return new FieldDataType("short"); } - @Override - protected int maxPrecisionStep() { - return 32; - } - - @Override - public Short value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).shortValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToShort((BytesRef) value); - } - return Short.parseShort(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private short parseValue(Object value) { + private static short parseValue(Object value) { if (value instanceof Number) { return ((Number) value).shortValue(); } @@ -185,28 +230,6 @@ public class ShortFieldMapper extends NumberFieldMapper { return Short.parseShort(value.toString()); } - private int parseValueAsInt(Object value) { - return parseValue(value); - } - - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - short iValue = Short.parseShort(value); - short iSim = fuzziness.asShort(); - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValueAsInt(lowerTerm), - upperTerm == null ? 
null : parseValueAsInt(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -223,7 +246,7 @@ public class ShortFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws IOException { short value; - float boost = this.boost; + float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { @@ -245,7 +268,7 @@ public class ShortFieldMapper extends NumberFieldMapper { value = ((Number) externalValue).shortValue(); } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), Short.toString(value), boost); + context.allEntries().addText(fieldType.names().fullName(), Short.toString(value), boost); } } else { XContentParser parser = context.parser(); @@ -256,7 +279,7 @@ public class ShortFieldMapper extends NumberFieldMapper { } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(names.fullName(), nullValueAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; @@ -285,16 +308,16 @@ public class ShortFieldMapper extends NumberFieldMapper { } else { value = parser.shortValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), parser.text(), boost); + context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - CustomShortNumericField field = new CustomShortNumericField(this, value, fieldType); + CustomShortNumericField field = new CustomShortNumericField(this, value, (NumberFieldType)fieldType); field.setBoost(boost); 
fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -320,8 +343,8 @@ public class ShortFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != DEFAULT_PRECISION_STEP) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != DEFAULT_PRECISION_STEP) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); @@ -334,22 +357,13 @@ public class ShortFieldMapper extends NumberFieldMapper { } - @Override - public FieldStats stats(Terms terms, int maxDoc) throws IOException { - long minValue = NumericUtils.getMinInt(terms); - long maxValue = NumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue - ); - } - public static class CustomShortNumericField extends CustomNumericField { private final short number; private final NumberFieldMapper mapper; - public CustomShortNumericField(NumberFieldMapper mapper, short number, FieldType fieldType) { + public CustomShortNumericField(NumberFieldMapper mapper, short number, NumberFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; diff --git a/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 5dfffc83809..0fd77fe5875 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -19,9 +19,7 @@ package org.elasticsearch.index.mapper.core; -import 
org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; @@ -34,19 +32,20 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.internal.AllFieldMapper; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.index.mapper.MapperBuilders.stringField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; @@ -59,7 +58,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa public static final String CONTENT_TYPE = "string"; public static class Defaults extends AbstractFieldMapper.Defaults { - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new StringFieldType(); static { FIELD_TYPE.freeze(); @@ -77,12 +76,10 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa protected int positionOffsetGap = Defaults.POSITION_OFFSET_GAP; - protected NamedAnalyzer searchQuotedAnalyzer; - 
protected int ignoreAbove = Defaults.IGNORE_ABOVE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); builder = this; } @@ -94,9 +91,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa @Override public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) { super.searchAnalyzer(searchAnalyzer); - if (searchQuotedAnalyzer == null) { - searchQuotedAnalyzer = searchAnalyzer; - } return this; } @@ -106,7 +100,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa } public Builder searchQuotedAnalyzer(NamedAnalyzer analyzer) { - this.searchQuotedAnalyzer = analyzer; + this.fieldType.setSearchQuoteAnalyzer(analyzer); return builder; } @@ -118,20 +112,20 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa @Override public StringFieldMapper build(BuilderContext context) { if (positionOffsetGap > 0) { - indexAnalyzer = new NamedAnalyzer(indexAnalyzer, positionOffsetGap); - searchAnalyzer = new NamedAnalyzer(searchAnalyzer, positionOffsetGap); - searchQuotedAnalyzer = new NamedAnalyzer(searchQuotedAnalyzer, positionOffsetGap); + fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionOffsetGap)); + fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionOffsetGap)); + fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionOffsetGap)); } // if the field is not analyzed, then by default, we should omit norms and have docs only // index options, as probably what the user really wants // if they are set explicitly, we will use those values // we also change the values on the default field type so that toXContent emits what // differs from the defaults - FieldType defaultFieldType = new FieldType(Defaults.FIELD_TYPE); + MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone(); if (fieldType.indexOptions() != IndexOptions.NONE && 
!fieldType.tokenized()) { defaultFieldType.setOmitNorms(true); defaultFieldType.setIndexOptions(IndexOptions.DOCS); - if (!omitNormsSet && boost == Defaults.BOOST) { + if (!omitNormsSet && fieldType.boost() == Defaults.BOOST) { fieldType.setOmitNorms(true); } if (!indexOptionsSet) { @@ -139,9 +133,9 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa } } defaultFieldType.freeze(); - StringFieldMapper fieldMapper = new StringFieldMapper(buildNames(context), - boost, fieldType, defaultFieldType, docValues, nullValue, indexAnalyzer, searchAnalyzer, searchQuotedAnalyzer, - positionOffsetGap, ignoreAbove, similarity, normsLoading, + setupFieldType(context); + StringFieldMapper fieldMapper = new StringFieldMapper( + fieldType, defaultFieldType, docValues, nullValue, positionOffsetGap, ignoreAbove, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; @@ -174,14 +168,14 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa builder.positionOffsetGap(XContentMapValues.nodeIntegerValue(propNode, -1)); // we need to update to actual analyzers if they are not set in this case... // so we can inject the position offset gap... 
- if (builder.indexAnalyzer == null) { - builder.indexAnalyzer = parserContext.analysisService().defaultIndexAnalyzer(); + if (builder.fieldType.indexAnalyzer() == null) { + builder.fieldType.setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer()); } - if (builder.searchAnalyzer == null) { - builder.searchAnalyzer = parserContext.analysisService().defaultSearchAnalyzer(); + if (builder.fieldType.searchAnalyzer() == null) { + builder.fieldType.setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer()); } - if (builder.searchQuotedAnalyzer == null) { - builder.searchQuotedAnalyzer = parserContext.analysisService().defaultSearchQuoteAnalyzer(); + if (builder.fieldType.searchQuoteAnalyzer() == null) { + builder.fieldType.setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer()); } iterator.remove(); } else if (propName.equals("ignore_above")) { @@ -195,32 +189,50 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa } } + public static class StringFieldType extends MappedFieldType { + + public StringFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected StringFieldType(StringFieldType ref) { + super(ref); + } + + public StringFieldType clone() { + return new StringFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + } + private String nullValue; private Boolean includeInAll; private int positionOffsetGap; - private NamedAnalyzer searchQuotedAnalyzer; private int ignoreAbove; - private final FieldType defaultFieldType; + private final MappedFieldType defaultFieldType; - protected StringFieldMapper(Names names, float boost, FieldType fieldType, FieldType defaultFieldType, Boolean docValues, - String nullValue, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - NamedAnalyzer searchQuotedAnalyzer, int positionOffsetGap, int ignoreAbove, - 
SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + protected StringFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, Boolean docValues, + String nullValue, int positionOffsetGap, int ignoreAbove, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, boost, fieldType, docValues, indexAnalyzer, searchAnalyzer, - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); - if (fieldType.tokenized() && fieldType.indexOptions() != IndexOptions.NONE && hasDocValues()) { - throw new MapperParsingException("Field [" + names.fullName() + "] cannot be analyzed and have doc values"); + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo); + if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { + throw new MapperParsingException("Field [" + fieldType.names().fullName() + "] cannot be analyzed and have doc values"); } this.defaultFieldType = defaultFieldType; this.nullValue = nullValue; this.positionOffsetGap = positionOffsetGap; - this.searchQuotedAnalyzer = searchQuotedAnalyzer != null ? 
searchQuotedAnalyzer : this.searchAnalyzer; this.ignoreAbove = ignoreAbove; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return defaultFieldType; } @@ -248,14 +260,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa includeInAll = null; } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - @Override protected boolean customBoost() { return true; @@ -269,11 +273,6 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa return ignoreAbove; } - @Override - public Analyzer searchQuoteAnalyzer() { - return this.searchQuotedAnalyzer; - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -284,7 +283,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = parseCreateFieldForString(context, nullValue, boost); + ValueAndBoost valueAndBoost = parseCreateFieldForString(context, nullValue, fieldType.boost()); if (valueAndBoost.value() == null) { return; } @@ -292,19 +291,19 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa return; } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), valueAndBoost.value(), valueAndBoost.boost()); + context.allEntries().addText(fieldType.names().fullName(), valueAndBoost.value(), valueAndBoost.boost()); } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new Field(names.indexName(), valueAndBoost.value(), fieldType); + Field field = new Field(fieldType.names().indexName(), valueAndBoost.value(), fieldType); field.setBoost(valueAndBoost.boost()); fields.add(field); } - if (hasDocValues()) { - fields.add(new SortedSetDocValuesField(names.indexName(), new 
BytesRef(valueAndBoost.value()))); + if (fieldType().hasDocValues()) { + fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(valueAndBoost.value()))); } if (fields.isEmpty()) { - context.ignoredValue(names.indexName(), valueAndBoost.value()); + context.ignoredValue(fieldType.names().indexName(), valueAndBoost.value()); } } @@ -381,13 +380,14 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa if (includeDefaults || positionOffsetGap != Defaults.POSITION_OFFSET_GAP) { builder.field("position_offset_gap", positionOffsetGap); } - if (searchQuotedAnalyzer != null && !searchQuotedAnalyzer.name().equals(searchAnalyzer.name())) { - builder.field("search_quote_analyzer", searchQuotedAnalyzer.name()); + NamedAnalyzer searchQuoteAnalyzer = fieldType.searchQuoteAnalyzer(); + if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType.searchAnalyzer().name())) { + builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); } else if (includeDefaults) { - if (searchQuotedAnalyzer == null) { + if (searchQuoteAnalyzer == null) { builder.field("search_quote_analyzer", "default"); } else { - builder.field("search_quote_analyzer", searchQuotedAnalyzer.name()); + builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); } } if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java index c13268b1988..eb53c172df4 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapper.java @@ -22,18 +22,18 @@ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.document.Field; 
-import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost; import org.elasticsearch.index.similarity.SimilarityProvider; @@ -43,6 +43,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.apache.lucene.index.IndexOptions.NONE; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; import static org.elasticsearch.index.mapper.MapperBuilders.tokenCountField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; @@ -55,6 +56,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { public static final String CONTENT_TYPE = "token_count"; public static class Defaults extends IntegerFieldMapper.Defaults { + } public static class Builder extends NumberFieldMapper.Builder { @@ -62,7 +64,7 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { private NamedAnalyzer analyzer; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -82,13 +84,23 @@ public class 
TokenCountFieldMapper extends IntegerFieldMapper { @Override public TokenCountFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); - TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, - ignoreMalformed(context), coerce(context), similarity, normsLoading, fieldDataSettings, context.indexSettings(), + setupFieldType(context); + TokenCountFieldMapper fieldMapper = new TokenCountFieldMapper(fieldType, docValues, nullValue, + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings(), analyzer, multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -122,34 +134,33 @@ public class TokenCountFieldMapper extends IntegerFieldMapper { private NamedAnalyzer analyzer; - protected TokenCountFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Integer nullValue, - Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, Settings fieldDataSettings, Settings indexSettings, NamedAnalyzer analyzer, + protected TokenCountFieldMapper(MappedFieldType fieldType, Boolean docValues, Integer nullValue, + Explicit ignoreMalformed, Explicit coerce, Settings fieldDataSettings, Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, nullValue, ignoreMalformed, coerce, - similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); + super(fieldType, docValues, nullValue, 
ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); this.analyzer = analyzer; } @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, boost); + ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Our null value is an int so we convert*/, fieldType.boost()); if (valueAndBoost.value() == null && nullValue() == null) { return; } - if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored() || hasDocValues()) { + if (fieldType.indexOptions() != NONE || fieldType.stored() || fieldType().hasDocValues()) { int count; if (valueAndBoost.value() == null) { count = nullValue(); } else { - count = countPositions(analyzer.analyzer().tokenStream(names().shortName(), valueAndBoost.value())); + count = countPositions(analyzer.analyzer().tokenStream(fieldType().names().shortName(), valueAndBoost.value())); } addIntegerFields(context, fields, count, valueAndBoost.boost()); } if (fields.isEmpty()) { - context.ignoredValue(names.indexName(), valueAndBoost.value()); + context.ignoredValue(fieldType.names().indexName(), valueAndBoost.value()); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java b/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java index 983b83ce054..d7434a574dd 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/TypeParsers.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import 
org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.object.ObjectMapper; diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index db0c9899c15..4485aed953b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -24,10 +24,9 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import com.google.common.base.Objects; import com.google.common.collect.Iterators; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -41,10 +40,9 @@ import org.elasticsearch.common.util.ByteUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -56,7 +54,6 @@ import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField; import 
org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.object.ArrayValueMapperParser; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.ArrayList; @@ -97,7 +94,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public static class Defaults { public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL; - public static final boolean STORE = false; public static final boolean ENABLE_LATLON = false; public static final boolean ENABLE_GEOHASH = false; public static final boolean ENABLE_GEOHASH_PREFIX = false; @@ -107,7 +103,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public static final boolean VALIDATE_LAT = true; public static final boolean VALIDATE_LON = true; - public static final FieldType FIELD_TYPE = new FieldType(StringFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); @@ -131,16 +127,15 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal private int geoHashPrecision = Defaults.GEO_HASH_PRECISION; - boolean validateLat = Defaults.VALIDATE_LAT; - boolean validateLon = Defaults.VALIDATE_LON; - boolean normalizeLat = Defaults.NORMALIZE_LAT; - boolean normalizeLon = Defaults.NORMALIZE_LON; - public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); this.builder = this; } + GeoPointFieldType fieldType() { + return (GeoPointFieldType)fieldType; + } + @Override public Builder multiFieldPathType(ContentPath.Type pathType) { this.pathType = pathType; @@ -185,6 +180,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal DoubleFieldMapper latMapper = null; DoubleFieldMapper lonMapper = null; + GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType; 
context.path().add(name); if (enableLatLon) { @@ -196,10 +192,13 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); + geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType()); } StringFieldMapper geohashMapper = null; - if (enableGeoHash) { + if (enableGeoHash || enableGeohashPrefix) { + // TODO: possibly also implicitly enable geohash if geohash precision is set geohashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).omitNorms(true).indexOptions(IndexOptions.DOCS).build(context); + geoPointFieldType.setGeohashEnabled(geohashMapper.fieldType(), geoHashPrecision, enableGeohashPrefix); } context.path().remove(); @@ -208,11 +207,11 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal // this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to // store them as a single token. 
fieldType.setTokenized(false); + fieldType.setHasDocValues(false); + setupFieldType(context); - return new GeoPointFieldMapper(buildNames(context), fieldType, docValues, indexAnalyzer, searchAnalyzer, - similarity, fieldDataSettings, context.indexSettings(), origPathType, enableLatLon, enableGeoHash, enableGeohashPrefix, precisionStep, - geoHashPrecision, latMapper, lonMapper, geohashMapper, validateLon, validateLat, normalizeLon, normalizeLat - , multiFieldsBuilder.build(this, context)); + return new GeoPointFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings(), origPathType, + latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context)); } } @@ -251,24 +250,24 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } iterator.remove(); } else if (fieldName.equals("validate")) { - builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); - builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode)); + builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("validate_lon")) { - builder.validateLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("validate_lat")) { - builder.validateLat = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("normalize")) { - builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); - builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode)); + builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode)); 
iterator.remove(); } else if (fieldName.equals("normalize_lat")) { - builder.normalizeLat = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("normalize_lon")) { - builder.normalizeLon = XContentMapValues.nodeBooleanValue(fieldNode); + builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode)); iterator.remove(); } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) { iterator.remove(); @@ -278,6 +277,128 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } } + public static class GeoPointFieldType extends MappedFieldType { + + private MappedFieldType geohashFieldType; + private int geohashPrecision; + private boolean geohashPrefixEnabled; + + private MappedFieldType latFieldType; + private MappedFieldType lonFieldType; + private boolean validateLon = true; + private boolean validateLat = true; + private boolean normalizeLon = true; + private boolean normalizeLat = true; + + public GeoPointFieldType() { + super(StringFieldMapper.Defaults.FIELD_TYPE); + } + + protected GeoPointFieldType(GeoPointFieldType ref) { + super(ref); + this.geohashFieldType = ref.geohashFieldType; // copying ref is ok, this can never be modified + this.geohashPrecision = ref.geohashPrecision; + this.geohashPrefixEnabled = ref.geohashPrefixEnabled; + this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified + this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified + this.validateLon = ref.validateLon; + this.validateLat = ref.validateLat; + this.normalizeLon = ref.normalizeLon; + this.normalizeLat = ref.normalizeLat; + } + + @Override + public MappedFieldType clone() { + return new GeoPointFieldType(this); + } + + public boolean isGeohashEnabled() { + return geohashFieldType != null; + } + + public MappedFieldType 
geohashFieldType() { + return geohashFieldType; + } + + public int geohashPrecision() { + return geohashPrecision; + } + + public boolean isGeohashPrefixEnabled() { + return geohashPrefixEnabled; + } + + public void setGeohashEnabled(MappedFieldType geohashFieldType, int geohashPrecision, boolean geohashPrefixEnabled) { + checkIfFrozen(); + this.geohashFieldType = geohashFieldType; + this.geohashPrecision = geohashPrecision; + this.geohashPrefixEnabled = geohashPrefixEnabled; + } + + public boolean isLatLonEnabled() { + return latFieldType != null; + } + + public MappedFieldType latFieldType() { + return latFieldType; + } + + public MappedFieldType lonFieldType() { + return lonFieldType; + } + + public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) { + checkIfFrozen(); + this.latFieldType = latFieldType; + this.lonFieldType = lonFieldType; + } + + public boolean validateLon() { + return validateLon; + } + + public void setValidateLon(boolean validateLon) { + checkIfFrozen(); + this.validateLon = validateLon; + } + + public boolean validateLat() { + return validateLat; + } + + public void setValidateLat(boolean validateLat) { + checkIfFrozen(); + this.validateLat = validateLat; + } + + public boolean normalizeLon() { + return normalizeLon; + } + + public void setNormalizeLon(boolean normalizeLon) { + checkIfFrozen(); + this.normalizeLon = normalizeLon; + } + + public boolean normalizeLat() { + return normalizeLat; + } + + public void setNormalizeLat(boolean normalizeLat) { + checkIfFrozen(); + this.normalizeLat = normalizeLat; + } + + @Override + public GeoPoint value(Object value) { + if (value instanceof GeoPoint) { + return (GeoPoint) value; + } else { + return GeoPoint.parseFromLatLon(value.toString()); + } + } + } + /** * A byte-aligned fixed-length encoding for latitudes and longitudes. 
*/ @@ -405,52 +526,19 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal private final ContentPath.Type pathType; - private final boolean enableLatLon; - - private final boolean enableGeoHash; - - private final boolean enableGeohashPrefix; - - private final Integer precisionStep; - - private final int geoHashPrecision; - private final DoubleFieldMapper latMapper; private final DoubleFieldMapper lonMapper; private final StringFieldMapper geohashMapper; - private boolean validateLon; - private boolean validateLat; - - private final boolean normalizeLon; - private final boolean normalizeLat; - - public GeoPointFieldMapper(FieldMapper.Names names, FieldType fieldType, Boolean docValues, - NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - SimilarityProvider similarity, @Nullable Settings fieldDataSettings, Settings indexSettings, - ContentPath.Type pathType, boolean enableLatLon, boolean enableGeoHash, boolean enableGeohashPrefix, Integer precisionStep, int geoHashPrecision, - DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, - boolean validateLon, boolean validateLat, - boolean normalizeLon, boolean normalizeLat, MultiFields multiFields) { - super(names, 1f, fieldType, docValues, null, indexAnalyzer, similarity, null, fieldDataSettings, indexSettings, multiFields, null); + public GeoPointFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings, + ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,MultiFields multiFields) { + super(fieldType, docValues, fieldDataSettings, indexSettings, multiFields, null); this.pathType = pathType; - this.enableLatLon = enableLatLon; - this.enableGeoHash = enableGeoHash || enableGeohashPrefix; // implicitly enable geohashes if geohash_prefix is set - this.enableGeohashPrefix = enableGeohashPrefix; - this.precisionStep = 
precisionStep; - this.geoHashPrecision = geoHashPrecision; - this.latMapper = latMapper; this.lonMapper = lonMapper; this.geohashMapper = geohashMapper; - - this.validateLat = validateLat; - this.validateLon = validateLon; - - this.normalizeLat = normalizeLat; - this.normalizeLon = normalizeLon; } @Override @@ -459,7 +547,12 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } @Override - public FieldType defaultFieldType() { + public GeoPointFieldType fieldType() { + return (GeoPointFieldType)fieldType; + } + + @Override + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -473,39 +566,6 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal return false; } - public DoubleFieldMapper latMapper() { - return latMapper; - } - - public DoubleFieldMapper lonMapper() { - return lonMapper; - } - - public StringFieldMapper geoHashStringMapper() { - return this.geohashMapper; - } - - int geoHashPrecision() { - return geoHashPrecision; - } - - public boolean isEnableLatLon() { - return enableLatLon; - } - - public boolean isEnableGeohashPrefix() { - return enableGeohashPrefix; - } - - @Override - public GeoPoint value(Object value) { - if (value instanceof GeoPoint) { - return (GeoPoint) value; - } else { - return GeoPoint.parseFromLatLon(value.toString()); - } - } - @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called"); @@ -515,7 +575,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public Mapper parse(ParseContext context) throws IOException { ContentPath.Type origPathType = context.path().pathType(); context.path().pathType(pathType); - context.path().add(names().shortName()); + context.path().add(fieldType().names().shortName()); GeoPoint sparse = context.parseExternalValue(GeoPoint.class); @@ 
-565,9 +625,9 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal return null; } - private void parseGeohashField(ParseContext context, String geohash) throws IOException { - int len = Math.min(geoHashPrecision, geohash.length()); - int min = enableGeohashPrefix ? 1 : geohash.length(); + private void addGeohashField(ParseContext context, String geohash) throws IOException { + int len = Math.min(fieldType().geohashPrecision(), geohash.length()); + int min = fieldType().isGeohashPrefixEnabled() ? 1 : geohash.length(); for (int i = len; i >= min; i--) { // side effect of this call is adding the field @@ -584,40 +644,40 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException { - if (normalizeLat || normalizeLon) { - GeoUtils.normalizePoint(point, normalizeLat, normalizeLon); + if (fieldType().normalizeLat() || fieldType().normalizeLon()) { + GeoUtils.normalizePoint(point, fieldType().normalizeLat(), fieldType().normalizeLon()); } - if (validateLat) { + if (fieldType().validateLat()) { if (point.lat() > 90.0 || point.lat() < -90.0) { throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); } } - if (validateLon) { + if (fieldType().validateLon()) { if (point.lon() > 180.0 || point.lon() < -180) { throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - Field field = new Field(names.indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType); + Field field = new Field(fieldType.names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType); context.doc().add(field); } - if (enableGeoHash) { + if (fieldType().isGeohashEnabled()) { if (geohash == null) { geohash = 
GeoHashUtils.encode(point.lat(), point.lon()); } - parseGeohashField(context, geohash); + addGeohashField(context, geohash); } - if (enableLatLon) { + if (fieldType().isLatLonEnabled()) { latMapper.parse(context.createExternalValueContext(point.lat())); lonMapper.parse(context.createExternalValueContext(point.lon())); } - if (hasDocValues()) { - CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(names().indexName()); + if (fieldType().hasDocValues()) { + CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName()); if (field == null) { - field = new CustomGeoPointDocValuesField(names().indexName(), point.lat(), point.lon()); - context.doc().addWithKey(names().indexName(), field); + field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon()); + context.doc().addWithKey(fieldType().names().indexName(), field); } else { field.add(point.lat(), point.lon()); } @@ -647,42 +707,43 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } GeoPointFieldMapper fieldMergeWith = (GeoPointFieldMapper) mergeWith; - if (this.enableLatLon != fieldMergeWith.enableLatLon) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different lat_lon"); + if (this.fieldType().isLatLonEnabled() != fieldMergeWith.fieldType().isLatLonEnabled()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different lat_lon"); } - if (this.enableGeoHash != fieldMergeWith.enableGeoHash) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash"); + if (this.fieldType().isGeohashEnabled() != fieldMergeWith.fieldType().isGeohashEnabled()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash"); } - if (this.geoHashPrecision != fieldMergeWith.geoHashPrecision) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has 
different geohash_precision"); + if (this.fieldType().geohashPrecision() != fieldMergeWith.fieldType().geohashPrecision()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_precision"); } - if (this.enableGeohashPrefix != fieldMergeWith.enableGeohashPrefix) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different geohash_prefix"); + if (this.fieldType().isGeohashPrefixEnabled() != fieldMergeWith.fieldType().isGeohashPrefixEnabled()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_prefix"); } - if (this.normalizeLat != fieldMergeWith.normalizeLat) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lat"); + if (this.fieldType().normalizeLat() != fieldMergeWith.fieldType().normalizeLat()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lat"); } - if (this.normalizeLon != fieldMergeWith.normalizeLon) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different normalize_lon"); + if (this.fieldType().normalizeLon() != fieldMergeWith.fieldType().normalizeLon()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lon"); } - if (!Objects.equal(this.precisionStep, fieldMergeWith.precisionStep)) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different precision_step"); + if (fieldType().isLatLonEnabled() && + this.fieldType().latFieldType().numericPrecisionStep() != fieldMergeWith.fieldType().latFieldType().numericPrecisionStep()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different precision_step"); } - if (this.validateLat != fieldMergeWith.validateLat) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lat"); + if (this.fieldType().validateLat() != fieldMergeWith.fieldType().validateLat()) { + 
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lat"); } - if (this.validateLon != fieldMergeWith.validateLon) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different validate_lon"); + if (this.fieldType().validateLon() != fieldMergeWith.fieldType().validateLon()) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lon"); } } @Override public Iterator iterator() { List extras = new ArrayList<>(); - if (enableGeoHash) { + if (fieldType().isGeohashEnabled()) { extras.add(geohashMapper); } - if (enableLatLon) { + if (fieldType().isLatLonEnabled()) { extras.add(latMapper); extras.add(lonMapper); } @@ -695,46 +756,46 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal if (includeDefaults || pathType != Defaults.PATH_TYPE) { builder.field("path", pathType.name().toLowerCase(Locale.ROOT)); } - if (includeDefaults || enableLatLon != Defaults.ENABLE_LATLON) { - builder.field("lat_lon", enableLatLon); + if (includeDefaults || fieldType().isLatLonEnabled() != Defaults.ENABLE_LATLON) { + builder.field("lat_lon", fieldType().isLatLonEnabled()); } - if (includeDefaults || enableGeoHash != Defaults.ENABLE_GEOHASH) { - builder.field("geohash", enableGeoHash); + if (includeDefaults || fieldType().isGeohashEnabled() != Defaults.ENABLE_GEOHASH) { + builder.field("geohash", fieldType().isGeohashEnabled()); } - if (includeDefaults || enableGeohashPrefix != Defaults.ENABLE_GEOHASH_PREFIX) { - builder.field("geohash_prefix", enableGeohashPrefix); + if (includeDefaults || fieldType().isGeohashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) { + builder.field("geohash_prefix", fieldType().isGeohashPrefixEnabled()); } - if (includeDefaults || geoHashPrecision != Defaults.GEO_HASH_PRECISION) { - builder.field("geohash_precision", geoHashPrecision); + if (fieldType().isGeohashEnabled() && (includeDefaults || fieldType().geohashPrecision() != 
Defaults.GEO_HASH_PRECISION)) { + builder.field("geohash_precision", fieldType().geohashPrecision()); } - if (includeDefaults || precisionStep != null) { - builder.field("precision_step", precisionStep); + if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) { + builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep()); } - if (includeDefaults || validateLat != Defaults.VALIDATE_LAT || validateLon != Defaults.VALIDATE_LON) { - if (validateLat && validateLon) { + if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT || fieldType().validateLon() != Defaults.VALIDATE_LON) { + if (fieldType().validateLat() && fieldType().validateLon()) { builder.field("validate", true); - } else if (!validateLat && !validateLon) { + } else if (!fieldType().validateLat() && !fieldType().validateLon()) { builder.field("validate", false); } else { - if (includeDefaults || validateLat != Defaults.VALIDATE_LAT) { - builder.field("validate_lat", validateLat); + if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT) { + builder.field("validate_lat", fieldType().validateLat()); } - if (includeDefaults || validateLon != Defaults.VALIDATE_LON) { - builder.field("validate_lon", validateLon); + if (includeDefaults || fieldType().validateLon() != Defaults.VALIDATE_LON) { + builder.field("validate_lon", fieldType().validateLon()); } } } - if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT || normalizeLon != Defaults.NORMALIZE_LON) { - if (normalizeLat && normalizeLon) { + if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) { + if (fieldType().normalizeLat() && fieldType().normalizeLon()) { builder.field("normalize", true); - } else if (!normalizeLat && !normalizeLon) { + } else if (!fieldType().normalizeLat() && !fieldType().normalizeLon()) { 
builder.field("normalize", false); } else { - if (includeDefaults || normalizeLat != Defaults.NORMALIZE_LAT) { - builder.field("normalize_lat", normalizeLat); + if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT) { + builder.field("normalize_lat", fieldType().normalizeLat()); } - if (includeDefaults || normalizeLon != Defaults.NORMALIZE_LON) { - builder.field("normalize_lon", normalizeLat); + if (includeDefaults || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) { + builder.field("normalize_lon", fieldType().normalizeLon()); } } } @@ -742,15 +803,9 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField { - public static final FieldType TYPE = new FieldType(); - static { - TYPE.setDocValuesType(DocValuesType.BINARY); - TYPE.freeze(); - } - private final ObjectHashSet points; - public CustomGeoPointDocValuesField(String name, double lat, double lon) { + public CustomGeoPointDocValuesField(String name, double lat, double lon) { super(name); points = new ObjectHashSet<>(2); points.add(new GeoPoint(lat, lon)); diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 42a4fb287ea..9468088c982 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.geo; import com.spatial4j.core.shape.Shape; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; @@ -40,10 +39,11 @@ import org.elasticsearch.common.unit.DistanceUnit; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; @@ -94,7 +94,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { public static final double DISTANCE_ERROR_PCT = 0.025d; public static final Orientation ORIENTATION = Orientation.RIGHT; - public static final FieldType FIELD_TYPE = new FieldType(); + public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); @@ -119,7 +119,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { private SpatialPrefixTree prefixTree; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); } public Builder tree(String tree) { @@ -155,7 +155,6 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { @Override public GeoShapeFieldMapper build(BuilderContext context) { - final FieldMapper.Names names = buildNames(context); if (Names.TREE_GEOHASH.equals(tree)) { prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true)); } else if (Names.TREE_QUADTREE.equals(tree)) { @@ -169,9 +168,19 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } else { throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]"); } + setupFieldType(context); - return new GeoShapeFieldMapper(names, prefixTree, strategyName, distanceErrorPct, 
orientation, fieldType, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + RecursivePrefixTreeStrategy recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, fieldType.names().indexName()); + recursiveStrategy.setDistErrPct(distanceErrorPct); + recursiveStrategy.setPruneLeafyBranches(false); + TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, fieldType.names().indexName()); + termStrategy.setDistErrPct(distanceErrorPct); + + GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType; + geoShapeFieldType.setStrategies(strategyName, recursiveStrategy, termStrategy); + geoShapeFieldType.setOrientation(orientation); + + return new GeoShapeFieldMapper(fieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } private final int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) { @@ -223,25 +232,83 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } } - private final PrefixTreeStrategy defaultStrategy; - private final RecursivePrefixTreeStrategy recursiveStrategy; - private final TermQueryPrefixTreeStrategy termStrategy; - private Orientation shapeOrientation; + public static class GeoShapeFieldType extends MappedFieldType { - public GeoShapeFieldMapper(FieldMapper.Names names, SpatialPrefixTree tree, String defaultStrategyName, double distanceErrorPct, - Orientation shapeOrientation, FieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, 1, fieldType, false, null, null, null, null, null, indexSettings, multiFields, copyTo); - this.recursiveStrategy = new RecursivePrefixTreeStrategy(tree, names.indexName()); - this.recursiveStrategy.setDistErrPct(distanceErrorPct); - this.recursiveStrategy.setPruneLeafyBranches(false); - this.termStrategy = new TermQueryPrefixTreeStrategy(tree, names.indexName()); - this.termStrategy.setDistErrPct(distanceErrorPct); - 
this.defaultStrategy = resolveStrategy(defaultStrategyName); - this.shapeOrientation = shapeOrientation; + private PrefixTreeStrategy defaultStrategy; + private RecursivePrefixTreeStrategy recursiveStrategy; + private TermQueryPrefixTreeStrategy termStrategy; + private Orientation orientation; + + public GeoShapeFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected GeoShapeFieldType(GeoShapeFieldType ref) { + super(ref); + // TODO: this shallow copy is probably not good...need to extract the parameters and recreate the tree and strategies? + this.defaultStrategy = ref.defaultStrategy; + this.recursiveStrategy = ref.recursiveStrategy; + this.termStrategy = ref.termStrategy; + this.orientation = ref.orientation; + } + + @Override + public MappedFieldType clone() { + return new GeoShapeFieldType(this); + } + + public PrefixTreeStrategy defaultStrategy() { + return this.defaultStrategy; + } + + public PrefixTreeStrategy resolveStrategy(String strategyName) { + if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { + return recursiveStrategy; + } + if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { + return termStrategy; + } + throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); + } + + public void setStrategies(String defaultStrategy, RecursivePrefixTreeStrategy recursiveStrategy, TermQueryPrefixTreeStrategy termStrategy) { + checkIfFrozen(); + this.recursiveStrategy = recursiveStrategy; + this.termStrategy = termStrategy; + this.defaultStrategy = resolveStrategy(defaultStrategy); + } + + public void setDistErrPct(double distErrPct) { + checkIfFrozen(); + this.recursiveStrategy.setDistErrPct(distErrPct); + this.termStrategy.setDistErrPct(distErrPct); + } + + public Orientation orientation() { return this.orientation; } + + public void setOrientation(Orientation orientation) { + checkIfFrozen(); + this.orientation = orientation; + } + + @Override + public String 
value(Object value) { + throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values"); + } + + } + + public GeoShapeFieldMapper(MappedFieldType fieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(fieldType, false, null, indexSettings, multiFields, copyTo); } @Override - public FieldType defaultFieldType() { + public GeoShapeFieldType fieldType() { + return (GeoShapeFieldType)fieldType; + } + + @Override + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -261,18 +328,18 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } shape = shapeBuilder.build(); } - Field[] fields = defaultStrategy.createIndexableFields(shape); + Field[] fields = fieldType().defaultStrategy().createIndexableFields(shape); if (fields == null || fields.length == 0) { return null; } for (Field field : fields) { if (!customBoost()) { - field.setBoost(boost); + field.setBoost(fieldType.boost()); } context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e); + throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e); } return null; } @@ -281,29 +348,29 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different field type"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different field type"); return; } final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith; - final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.defaultStrategy; + final PrefixTreeStrategy mergeWithStrategy = fieldMergeWith.fieldType().defaultStrategy(); // prevent user from changing strategies - if 
(!(this.defaultStrategy.getClass().equals(mergeWithStrategy.getClass()))) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different strategy"); + if (!(this.fieldType().defaultStrategy().getClass().equals(mergeWithStrategy.getClass()))) { + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different strategy"); } - final SpatialPrefixTree grid = this.defaultStrategy.getGrid(); + final SpatialPrefixTree grid = this.fieldType().defaultStrategy().getGrid(); final SpatialPrefixTree mergeGrid = mergeWithStrategy.getGrid(); // prevent user from changing trees (changes encoding) if (!grid.getClass().equals(mergeGrid.getClass())) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree"); } // TODO we should allow this, but at the moment levels is used to build bookkeeping variables // in lucene's SpatialPrefixTree implementations, need a patch to correct that first if (grid.getMaxLevels() != mergeGrid.getMaxLevels()) { - mergeResult.addConflict("mapper [" + names.fullName() + "] has different tree_levels or precision"); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree_levels or precision"); } // bail if there were merge conflicts @@ -312,11 +379,12 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } // change distance error percent - this.defaultStrategy.setDistErrPct(mergeWithStrategy.getDistErrPct()); - + this.fieldType = this.fieldType.clone(); + this.fieldType().setDistErrPct(mergeWithStrategy.getDistErrPct()); // change orientation - this is allowed because existing dateline spanning shapes // have already been unwound and segmented - this.shapeOrientation = fieldMergeWith.shapeOrientation; + this.fieldType().setOrientation(fieldMergeWith.fieldType().orientation()); + this.fieldType.freeze(); } @Override @@ -328,25 +396,25 @@ public class 
GeoShapeFieldMapper extends AbstractFieldMapper { builder.field("type", contentType()); // TODO: Come up with a better way to get the name, maybe pass it from builder - if (defaultStrategy.getGrid() instanceof GeohashPrefixTree) { + if (fieldType().defaultStrategy().getGrid() instanceof GeohashPrefixTree) { // Don't emit the tree name since GeohashPrefixTree is the default // Only emit the tree levels if it isn't the default value - if (includeDefaults || defaultStrategy.getGrid().getMaxLevels() != Defaults.GEOHASH_LEVELS) { - builder.field(Names.TREE_LEVELS, defaultStrategy.getGrid().getMaxLevels()); + if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.GEOHASH_LEVELS) { + builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels()); } } else { builder.field(Names.TREE, Names.TREE_QUADTREE); - if (includeDefaults || defaultStrategy.getGrid().getMaxLevels() != Defaults.QUADTREE_LEVELS) { - builder.field(Names.TREE_LEVELS, defaultStrategy.getGrid().getMaxLevels()); + if (includeDefaults || fieldType().defaultStrategy().getGrid().getMaxLevels() != Defaults.QUADTREE_LEVELS) { + builder.field(Names.TREE_LEVELS, fieldType().defaultStrategy().getGrid().getMaxLevels()); } } - if (includeDefaults || defaultStrategy.getDistErrPct() != Defaults.DISTANCE_ERROR_PCT) { - builder.field(Names.DISTANCE_ERROR_PCT, defaultStrategy.getDistErrPct()); + if (includeDefaults || fieldType().defaultStrategy().getDistErrPct() != Defaults.DISTANCE_ERROR_PCT) { + builder.field(Names.DISTANCE_ERROR_PCT, fieldType().defaultStrategy().getDistErrPct()); } - if (includeDefaults || orientation() != Defaults.ORIENTATION) { - builder.field(Names.ORIENTATION, orientation()); + if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) { + builder.field(Names.ORIENTATION, fieldType().orientation()); } } @@ -354,34 +422,4 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { protected String contentType() { 
return CONTENT_TYPE; } - - @Override - public String value(Object value) { - throw new UnsupportedOperationException("GeoShape fields cannot be converted to String values"); - } - - public PrefixTreeStrategy defaultStrategy() { - return this.defaultStrategy; - } - - public PrefixTreeStrategy recursiveStrategy() { - return this.recursiveStrategy; - } - - public PrefixTreeStrategy termStrategy() { - return this.termStrategy; - } - - public Orientation orientation() { return this.shapeOrientation; } - - public PrefixTreeStrategy resolveStrategy(String strategyName) { - if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { - return recursiveStrategy; - } - if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { - return termStrategy; - } - throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); - } - } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index c310d96476c..364dee5852b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; @@ -34,8 +33,8 @@ import org.elasticsearch.common.lucene.all.AllField; import org.elasticsearch.common.lucene.all.AllTermQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -45,7 +44,6 @@ import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.similarity.SimilarityLookupService; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -80,11 +78,12 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { public static final String INDEX_NAME = AllFieldMapper.NAME; public static final EnabledAttributeMapper ENABLED = EnabledAttributeMapper.UNSET_ENABLED; - public static final FieldType FIELD_TYPE = new FieldType(); + public static final MappedFieldType FIELD_TYPE = new AllFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); FIELD_TYPE.setTokenized(true); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } @@ -94,7 +93,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { private EnabledAttributeMapper enabled = Defaults.ENABLED; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); builder = this; indexName = Defaults.INDEX_NAME; } @@ -113,7 +112,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } fieldType.setTokenized(true); - return new AllFieldMapper(name, fieldType, indexAnalyzer, searchAnalyzer, enabled, similarity, normsLoading, fieldDataSettings, context.indexSettings()); + return new AllFieldMapper(fieldType, enabled, fieldDataSettings, context.indexSettings()); } } @@ -156,18 +155,49 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } } + public static class AllFieldType extends MappedFieldType { + + 
public AllFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected AllFieldType(AllFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new AllFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public Query queryStringTermQuery(Term term) { + return new AllTermQuery(term); + } + + @Override + public Query termQuery(Object value, QueryParseContext context) { + return queryStringTermQuery(createTerm(value)); + } + } private EnabledAttributeMapper enabledState; public AllFieldMapper(Settings indexSettings) { - this(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), null, null, Defaults.ENABLED, null, null, null, indexSettings); + this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED, null, indexSettings); } - protected AllFieldMapper(String name, FieldType fieldType, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - EnabledAttributeMapper enabled, SimilarityProvider similarity, Loading normsLoading, + protected AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, name, name, name), 1.0f, fieldType, false, indexAnalyzer, searchAnalyzer, - similarity, normsLoading, fieldDataSettings, indexSettings); + super(fieldType, false, fieldDataSettings, indexSettings); this.enabledState = enabled; } @@ -177,7 +207,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -186,16 +216,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { return new FieldDataType("string"); } - @Override - public Query queryStringTermQuery(Term term) { - return new AllTermQuery(term); - } - - @Override - public Query 
termQuery(Object value, QueryParseContext context) { - return queryStringTermQuery(createTerm(value)); - } - @Override public void preParse(ParseContext context) throws IOException { } @@ -219,11 +239,11 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { // reset the entries context.allEntries().reset(); Analyzer analyzer = findAnalyzer(context); - fields.add(new AllField(names.indexName(), context.allEntries(), analyzer, fieldType)); + fields.add(new AllField(fieldType.names().indexName(), context.allEntries(), analyzer, fieldType)); } private Analyzer findAnalyzer(ParseContext context) { - Analyzer analyzer = indexAnalyzer; + Analyzer analyzer = fieldType.indexAnalyzer(); if (analyzer == null) { analyzer = context.docMapper().mappers().indexAnalyzer(); if (analyzer == null) { @@ -233,14 +253,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } return analyzer; } - - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } @Override protected String contentType() { @@ -294,8 +306,8 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { doXContentAnalyzers(builder, includeDefaults); - if (similarity() != null) { - builder.field("similarity", similarity().name()); + if (fieldType().similarity() != null) { + builder.field("similarity", fieldType().similarity().name()); } else if (includeDefaults) { builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY); } @@ -303,14 +315,14 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } } @Override public void merge(Mapper 
mergeWith, MergeResult mergeResult) throws MergeMappingException { if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) { - mergeResult.addConflict("mapper [" + names.fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); + mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled()); } super.merge(mergeWith, mergeResult); } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index 2d4a6975d94..631fdb87771 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -38,6 +39,7 @@ import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; +import org.elasticsearch.search.highlight.HighlightBuilder; import java.io.IOException; import java.util.ArrayList; @@ -65,13 +67,16 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa public static final String NAME = FieldNamesFieldMapper.NAME; public static final EnabledAttributeMapper ENABLED_STATE = 
EnabledAttributeMapper.UNSET_ENABLED; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new FieldNamesFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } @@ -80,7 +85,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa private EnabledAttributeMapper enabledState = Defaults.ENABLED_STATE; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @@ -98,7 +103,8 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa @Override public FieldNamesFieldMapper build(BuilderContext context) { - return new FieldNamesFieldMapper(name, indexName, boost, fieldType, enabledState, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new FieldNamesFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings()); } } @@ -127,17 +133,45 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa } } - private final FieldType defaultFieldType; + public static class FieldNamesFieldType extends MappedFieldType { + + public FieldNamesFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected FieldNamesFieldType(FieldNamesFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new FieldNamesFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + 
public boolean useTermQueryWithQueryString() { + return true; + } + } + + private final MappedFieldType defaultFieldType; private EnabledAttributeMapper enabledState; private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled public FieldNamesFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), Defaults.ENABLED_STATE, null, indexSettings); + this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings); } - public FieldNamesFieldMapper(String name, String indexName, float boost, FieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + public FieldNamesFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); this.defaultFieldType = Defaults.FIELD_TYPE; this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0); this.enabledState = enabledState; @@ -148,7 +182,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return defaultFieldType; } @@ -157,19 +191,6 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa return new FieldDataType("string"); } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - @Override public void preParse(ParseContext context) throws IOException { } @@ -230,7 +251,7 @@ public class 
FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa for (String path : paths) { for (String fieldName : extractFieldNames(path)) { if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - document.add(new Field(names().indexName(), fieldName, fieldType)); + document.add(new Field(fieldType().names().indexName(), fieldName, fieldType)); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index b7438540c40..87cd4f7cb31 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.collect.Iterables; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; @@ -43,6 +42,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -73,14 +73,16 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = IdFieldMapper.NAME; - public static final String INDEX_NAME = IdFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final 
MappedFieldType FIELD_TYPE = new IdFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -92,8 +94,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { private String path = Defaults.PATH; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); - indexName = Defaults.INDEX_NAME; + super(Defaults.NAME, Defaults.FIELD_TYPE); + indexName = Defaults.NAME; } public Builder path(String path) { @@ -108,7 +110,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { @Override public IdFieldMapper build(BuilderContext context) { - return new IdFieldMapper(name, indexName, boost, fieldType, docValues, path, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new IdFieldMapper(fieldType, docValues, path, fieldDataSettings, context.indexSettings()); } } @@ -133,21 +136,109 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { } } + public static class IdFieldType extends MappedFieldType { + + public IdFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected IdFieldType(IdFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new IdFieldType(this); + } + + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + + @Override + public Query termQuery(Object value, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.termQuery(value, context); + } + 
final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value); + return new TermsQuery(UidFieldMapper.NAME, uids); + } + + @Override + public Query termsQuery(List values, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.termsQuery(values, context); + } + return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values)); + } + + @Override + public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.prefixQuery(value, method, context); + } + Collection queryTypes = context.queryTypes(); + BooleanQuery query = new BooleanQuery(); + for (String queryType : queryTypes) { + PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value)))); + if (method != null) { + prefixQuery.setRewriteMethod(method); + } + query.add(prefixQuery, BooleanClause.Occur.SHOULD); + } + return query; + } + + @Override + public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { + if (indexOptions() != IndexOptions.NONE || context == null) { + return super.regexpQuery(value, flags, maxDeterminizedStates, method, context); + } + Collection queryTypes = context.queryTypes(); + if (queryTypes.size() == 1) { + RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))), + flags, maxDeterminizedStates); + if (method != null) { + regexpQuery.setRewriteMethod(method); + } + return regexpQuery; + } + BooleanQuery query = new BooleanQuery(); + for (String queryType : queryTypes) { + RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, 
Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates); + if (method != null) { + regexpQuery.setRewriteMethod(method); + } + query.add(regexpQuery, BooleanClause.Occur.SHOULD); + } + return query; + } + } + private final String path; public IdFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.INDEX_NAME, Defaults.BOOST, idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings); + this(idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings); } - protected IdFieldMapper(String name, String indexName, float boost, FieldType fieldType, Boolean docValues, String path, + protected IdFieldMapper(MappedFieldType fieldType, Boolean docValues, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), boost, fieldType, docValues, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + super(fieldType, docValues, fieldDataSettings, indexSettings); this.path = path; } - private static FieldType idFieldType(Settings indexSettings) { - FieldType fieldType = new FieldType(Defaults.FIELD_TYPE); + private static MappedFieldType idFieldType(Settings indexSettings) { + MappedFieldType fieldType = Defaults.FIELD_TYPE.clone(); boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0); if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) { fieldType.setTokenized(false); @@ -160,7 +251,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -169,78 +260,6 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { return new FieldDataType("string"); } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return 
value.toString(); - } - - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - - @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.termQuery(value, context); - } - final BytesRef[] uids = Uid.createUidsForTypesAndId(context.queryTypes(), value); - return new TermsQuery(UidFieldMapper.NAME, uids); - } - - @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.termsQuery(values, context); - } - return new TermsQuery(UidFieldMapper.NAME, Uid.createUidsForTypesAndIds(context.queryTypes(), values)); - } - - @Override - public Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.prefixQuery(value, method, context); - } - Collection queryTypes = context.queryTypes(); - BooleanQuery query = new BooleanQuery(); - for (String queryType : queryTypes) { - PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value)))); - if (method != null) { - prefixQuery.setRewriteMethod(method); - } - query.add(prefixQuery, BooleanClause.Occur.SHOULD); - } - return query; - } - - @Override - public Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() != IndexOptions.NONE || context == null) { - return super.regexpQuery(value, flags, maxDeterminizedStates, method, context); - } - Collection queryTypes = context.queryTypes(); - if (queryTypes.size() == 1) { - RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, 
Uid.createUidAsBytes(Iterables.getFirst(queryTypes, null), BytesRefs.toBytesRef(value))), - flags, maxDeterminizedStates); - if (method != null) { - regexpQuery.setRewriteMethod(method); - } - return regexpQuery; - } - BooleanQuery query = new BooleanQuery(); - for (String queryType : queryTypes) { - RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates); - if (method != null) { - regexpQuery.setRewriteMethod(method); - } - query.add(regexpQuery, BooleanClause.Occur.SHOULD); - } - return query; - } - @Override public void preParse(ParseContext context) throws IOException { if (context.sourceToParse().id() != null) { @@ -270,10 +289,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { } // else we are in the pre/post parse phase if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - fields.add(new Field(names.indexName(), context.id(), fieldType)); + fields.add(new Field(fieldType.names().indexName(), context.id(), fieldType)); } - if (hasDocValues()) { - fields.add(new BinaryDocValuesField(names.indexName(), new BytesRef(context.id()))); + if (fieldType().hasDocValues()) { + fields.add(new BinaryDocValuesField(fieldType.names().indexName(), new BytesRef(context.id()))); } } @@ -310,7 +329,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); return builder; diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 34ab0bcb4ae..00fe4013ed8 
100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; @@ -30,6 +29,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.MapperParsingException; @@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; +import org.elasticsearch.search.highlight.HighlightBuilder; import java.io.IOException; import java.util.Iterator; @@ -59,13 +60,16 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = IndexFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new IndexFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new 
MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -77,7 +81,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @@ -88,7 +92,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper @Override public IndexFieldMapper build(BuilderContext context) { - return new IndexFieldMapper(name, indexName, boost, fieldType, enabledState, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new IndexFieldMapper(fieldType, enabledState, fieldDataSettings, context.indexSettings()); } } @@ -114,16 +119,39 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper } } + public static class IndexFieldType extends MappedFieldType { + + public IndexFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected IndexFieldType(IndexFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new IndexFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + } + private EnabledAttributeMapper enabledState; public IndexFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), Defaults.ENABLED_STATE, null, indexSettings); + this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings); } - public IndexFieldMapper(String name, String indexName, float boost, FieldType fieldType, EnabledAttributeMapper enabledState, + public IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState, @Nullable Settings fieldDataSettings, Settings indexSettings) 
{ - super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + super(fieldType, false, fieldDataSettings, indexSettings); this.enabledState = enabledState; } @@ -132,7 +160,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -142,16 +170,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper } public String value(Document document) { - Field field = (Field) document.getField(names.indexName()); - return field == null ? null : value(field); - } - - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); + Field field = (Field) document.getField(fieldType.names().indexName()); + return field == null ? null : (String)fieldType().value(field); } @Override @@ -174,7 +194,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper if (!enabledState.enabled) { return; } - fields.add(new Field(names.indexName(), context.index(), fieldType)); + fields.add(new Field(fieldType.names().indexName(), context.index(), fieldType)); } @Override @@ -202,7 +222,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } } builder.endObject(); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index 7aca0b17ea6..ea79136beb1 100644 --- 
a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.base.Objects; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Query; @@ -35,6 +34,7 @@ import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -70,18 +70,21 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = ParentFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new ParentFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } - public static class Builder extends Mapper.Builder { + public static class Builder extends AbstractFieldMapper.Builder { protected String indexName; @@ -89,7 +92,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper protected Settings fieldDataSettings; public 
Builder() { - super(Defaults.NAME); + super(Defaults.NAME, Defaults.FIELD_TYPE); this.indexName = name; builder = this; } @@ -109,7 +112,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper if (type == null) { throw new MapperParsingException("Parent mapping must contain the parent type"); } - return new ParentFieldMapper(name, indexName, type, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new ParentFieldMapper(fieldType, type, fieldDataSettings, context.indexSettings()); } } @@ -130,8 +134,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper } else if (fieldName.equals("fielddata")) { // Only take over `loading`, since that is the only option now that is configurable: Map fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata")); - if (fieldDataSettings.containsKey(Loading.KEY)) { - Settings settings = settingsBuilder().put(Loading.KEY, fieldDataSettings.get(Loading.KEY)).build(); + if (fieldDataSettings.containsKey(MappedFieldType.Loading.KEY)) { + Settings settings = settingsBuilder().put(MappedFieldType.Loading.KEY, fieldDataSettings.get(MappedFieldType.Loading.KEY)).build(); builder.fieldDataSettings(settings); } iterator.remove(); @@ -141,19 +145,101 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper } } + public static class ParentFieldType extends MappedFieldType { + + public ParentFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected ParentFieldType(ParentFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new ParentFieldType(this); + } + + @Override + public Uid value(Object value) { + if (value == null) { + return null; + } + return Uid.createUid(value.toString()); + } + + @Override + public Object valueForSearch(Object value) { + if (value == null) { + return 
null; + } + String sValue = value.toString(); + if (sValue == null) { + return null; + } + int index = sValue.indexOf(Uid.DELIMITER); + if (index == -1) { + return sValue; + } + return sValue.substring(index + 1); + } + + /** + * We don't need to analyzer the text, and we need to convert it to UID... + */ + @Override + public boolean useTermQueryWithQueryString() { + return true; + } + + @Override + public Query termQuery(Object value, @Nullable QueryParseContext context) { + return termsQuery(Collections.singletonList(value), context); + } + + @Override + public Query termsQuery(List values, @Nullable QueryParseContext context) { + if (context == null) { + return super.termsQuery(values, context); + } + + List types = new ArrayList<>(context.mapperService().types().size()); + for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) { + if (!documentMapper.parentFieldMapper().active()) { + types.add(documentMapper.type()); + } + } + + List bValues = new ArrayList<>(values.size()); + for (Object value : values) { + BytesRef bValue = BytesRefs.toBytesRef(value); + if (Uid.hasDelimiter(bValue)) { + bValues.add(bValue); + } else { + // we use all non child types, cause we don't know if its exact or not... 
+ for (String type : types) { + bValues.add(Uid.createUidAsBytes(type, bValue)); + } + } + } + return new TermsQuery(names().indexName(), bValues); + } + } + private final String type; private final BytesRef typeAsBytes; - protected ParentFieldMapper(String name, String indexName, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), false, - Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + protected ParentFieldMapper(MappedFieldType fieldType, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); this.type = type; this.typeAsBytes = type == null ? null : new BytesRef(type); } public ParentFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, null, null, indexSettings); - this.fieldDataType = new FieldDataType("_parent", settingsBuilder().put(Loading.KEY, Loading.LAZY_VALUE)); + this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings); + this.fieldType = this.fieldType.clone(); + this.fieldType.setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE))); + this.fieldType.freeze(); } public String type() { @@ -161,13 +247,13 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @Override public FieldDataType defaultFieldDataType() { - return new FieldDataType("_parent", settingsBuilder().put(Loading.KEY, Loading.EAGER_VALUE)); + return new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.EAGER_VALUE)); } @Override @@ -189,7 +275,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements 
RootMapper // we are in the parsing of _parent phase String parentId = context.parser().text(); context.sourceToParse().parent(parentId); - fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); + fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); } else { // otherwise, we are running it post processing of the xcontent String parsedParentId = context.doc().get(Defaults.NAME); @@ -200,7 +286,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper throw new MapperParsingException("No parent id provided, not within the document, and not externally"); } // we did not add it in the parsing phase, add it now - fields.add(new Field(names.indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); + fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType)); } else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) { throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]"); } @@ -209,87 +295,6 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper // we have parent mapping, yet no value was set, ignore it... 
} - @Override - public Uid value(Object value) { - if (value == null) { - return null; - } - return Uid.createUid(value.toString()); - } - - @Override - public Object valueForSearch(Object value) { - if (value == null) { - return null; - } - String sValue = value.toString(); - if (sValue == null) { - return null; - } - int index = sValue.indexOf(Uid.DELIMITER); - if (index == -1) { - return sValue; - } - return sValue.substring(index + 1); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - if (value instanceof BytesRef) { - BytesRef bytesRef = (BytesRef) value; - if (Uid.hasDelimiter(bytesRef)) { - return bytesRef; - } - return Uid.createUidAsBytes(typeAsBytes, bytesRef); - } - String sValue = value.toString(); - if (sValue.indexOf(Uid.DELIMITER) == -1) { - return Uid.createUidAsBytes(type, sValue); - } - return super.indexedValueForSearch(value); - } - - @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - return termsQuery(Collections.singletonList(value), context); - } - - @Override - public Query termsQuery(List values, @Nullable QueryParseContext context) { - if (context == null) { - return super.termsQuery(values, context); - } - - List types = new ArrayList<>(context.mapperService().types().size()); - for (DocumentMapper documentMapper : context.mapperService().docMappers(false)) { - if (!documentMapper.parentFieldMapper().active()) { - types.add(documentMapper.type()); - } - } - - List bValues = new ArrayList<>(values.size()); - for (Object value : values) { - BytesRef bValue = BytesRefs.toBytesRef(value); - if (Uid.hasDelimiter(bValue)) { - bValues.add(bValue); - } else { - // we use all non child types, cause we don't know if its exact or not... - for (String type : types) { - bValues.add(Uid.createUidAsBytes(type, bValue)); - } - } - } - return new TermsQuery(names.indexName(), bValues); - } - - /** - * We don't need to analyzer the text, and we need to convert it to UID... 
- */ - @Override - public boolean useTermQueryWithQueryString() { - return true; - } - @Override protected String contentType() { return CONTENT_TYPE; @@ -307,12 +312,28 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); return builder; } + @Override + public BytesRef indexedValueForSearch(Object value) { + if (value instanceof BytesRef) { + BytesRef bytesRef = (BytesRef) value; + if (Uid.hasDelimiter(bytesRef)) { + return bytesRef; + } + return Uid.createUidAsBytes(typeAsBytes, bytesRef); + } + String sValue = value.toString(); + if (sValue.indexOf(Uid.DELIMITER) == -1) { + return Uid.createUidAsBytes(type, sValue); + } + return super.indexedValueForSearch(value); + } + @Override public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { ParentFieldMapper other = (ParentFieldMapper) mergeWith; @@ -322,14 +343,16 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper if (!mergeResult.simulate()) { ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith; + this.fieldType = this.fieldType.clone(); if (fieldMergeWith.customFieldDataSettings != null) { if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) { this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings; - this.fieldDataType = new FieldDataType(defaultFieldDataType().getType(), + this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(), builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings) - ); + )); } } + this.fieldType.freeze(); } } diff --git 
a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 96b9375d9ad..d03238caf6e 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; @@ -30,6 +29,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -58,13 +58,16 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = "_routing"; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new RoutingFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -79,7 +82,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe private String path = 
Defaults.PATH; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); } public Builder required(boolean required) { @@ -121,6 +124,29 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe } } + public static class RoutingFieldType extends MappedFieldType { + + public RoutingFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected RoutingFieldType(RoutingFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new RoutingFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + } private boolean required; private final String path; @@ -129,15 +155,14 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe this(Defaults.FIELD_TYPE, Defaults.REQUIRED, Defaults.PATH, null, indexSettings); } - protected RoutingFieldMapper(FieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), 1.0f, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); this.required = required; this.path = path; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -159,16 +184,8 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe } public String value(Document document) { - Field field = (Field) document.getField(names.indexName()); - return field == null ? 
null : value(field); - } - - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); + Field field = (Field) document.getField(fieldType.names().indexName()); + return field == null ? null : (String)value(field); } @Override @@ -194,10 +211,10 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe String routing = context.sourceToParse().routing(); if (routing != null) { if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) { - context.ignoredValue(names.indexName(), routing); + context.ignoredValue(fieldType.names().indexName(), routing); return; } - fields.add(new Field(names.indexName(), routing, fieldType)); + fields.add(new Field(fieldType.names().indexName(), routing, fieldType)); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java index 1d8ad2a422c..a9435c5a1da 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java @@ -20,16 +20,18 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.MergeMappingException; +import 
org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.IntegerFieldMapper; @@ -53,10 +55,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { public static final String NAME = CONTENT_TYPE; public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED; - public static final FieldType SIZE_FIELD_TYPE = new FieldType(IntegerFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType SIZE_FIELD_TYPE = IntegerFieldMapper.Defaults.FIELD_TYPE.clone(); static { SIZE_FIELD_TYPE.setStored(true); + SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT); + SIZE_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); SIZE_FIELD_TYPE.freeze(); } } @@ -66,7 +70,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.SIZE_FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT); + super(Defaults.NAME, Defaults.SIZE_FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); builder = this; } @@ -77,8 +81,19 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { @Override public SizeFieldMapper build(BuilderContext context) { + setupFieldType(context); return new SizeFieldMapper(enabledState, fieldType, fieldDataSettings, context.indexSettings()); } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericIntegerAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 32; + } } public static class TypeParser implements Mapper.TypeParser { @@ -104,12 +119,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { private EnabledAttributeMapper enabledState; public 
SizeFieldMapper(Settings indexSettings) { - this(Defaults.ENABLED_STATE, new FieldType(Defaults.SIZE_FIELD_TYPE), null, indexSettings); + this(Defaults.ENABLED_STATE, Defaults.SIZE_FIELD_TYPE.clone(), null, indexSettings); } - public SizeFieldMapper(EnabledAttributeMapper enabled, FieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME), Defaults.PRECISION_STEP_32_BIT, Defaults.BOOST, fieldType, false, Defaults.NULL_VALUE, - Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, null, fieldDataSettings, + public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, Defaults.NULL_VALUE, + Defaults.IGNORE_MALFORMED, Defaults.COERCE, fieldDataSettings, indexSettings, MultiFields.empty(), null); this.enabledState = enabled; } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 7c8ae58d5fd..0324ef0d0d3 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import com.google.common.base.Objects; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; @@ -45,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import 
org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -78,12 +78,15 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper public static final long COMPRESS_THRESHOLD = -1; public static final String FORMAT = null; // default format is to use the one provided - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new SourceFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.NONE); // not indexed FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } @@ -138,7 +141,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper @Override public SourceFieldMapper build(BuilderContext context) { - return new SourceFieldMapper(name, enabled, format, compress, compressThreshold, includes, excludes, context.indexSettings()); + return new SourceFieldMapper(enabled, format, compress, compressThreshold, includes, excludes, context.indexSettings()); } } @@ -195,6 +198,39 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper } } + public static class SourceFieldType extends MappedFieldType { + + public SourceFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected SourceFieldType(SourceFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new SourceFieldType(this); + } + + @Override + public byte[] value(Object value) { + if (value == null) { + return null; + } + BytesReference bValue; + if (value instanceof BytesRef) { + bValue = new BytesArray((BytesRef) value); + } else { + bValue = (BytesReference) value; + } + try { + return 
CompressorFactory.uncompressIfNeeded(bValue).toBytes(); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to decompress source", e); + } + } + } private final boolean enabled; @@ -212,13 +248,12 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper private XContentType formatContentType; public SourceFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null, indexSettings); + this(Defaults.ENABLED, Defaults.FORMAT, null, -1, null, null, indexSettings); } - protected SourceFieldMapper(String name, boolean enabled, String format, Boolean compress, long compressThreshold, + protected SourceFieldMapper(boolean enabled, String format, Boolean compress, long compressThreshold, String[] includes, String[] excludes, Settings indexSettings) { - super(new Names(name, name, name, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), false, - Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, null, indexSettings); // Only stored. + super(Defaults.FIELD_TYPE.clone(), false, null, indexSettings); // Only stored. 
this.enabled = enabled; this.compress = compress; this.compressThreshold = compressThreshold; @@ -247,7 +282,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -358,25 +393,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper if (!source.hasArray()) { source = source.toBytesArray(); } - fields.add(new StoredField(names().indexName(), source.array(), source.arrayOffset(), source.length())); - } - - @Override - public byte[] value(Object value) { - if (value == null) { - return null; - } - BytesReference bValue; - if (value instanceof BytesRef) { - bValue = new BytesArray((BytesRef) value); - } else { - bValue = (BytesReference) value; - } - try { - return CompressorFactory.uncompressIfNeeded(bValue).toBytes(); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to decompress source", e); - } + fields.add(new StoredField(fieldType().names().indexName(), source.array(), source.arrayOffset(), source.length())); } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 8e817ddd903..0040b4a5140 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; @@ -30,6 +29,9 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.index.AlreadyExpiredException; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericLongAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -59,12 +61,14 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { public static class Defaults extends LongFieldMapper.Defaults { public static final String NAME = TTLFieldMapper.CONTENT_TYPE; - public static final FieldType TTL_FIELD_TYPE = new FieldType(LongFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType TTL_FIELD_TYPE = new TTLFieldType(); static { TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); TTL_FIELD_TYPE.setStored(true); TTL_FIELD_TYPE.setTokenized(false); + TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); + TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); TTL_FIELD_TYPE.freeze(); } @@ -78,7 +82,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { private long defaultTTL = Defaults.DEFAULT; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.TTL_FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); } public Builder enabled(EnabledAttributeMapper enabled) { @@ -93,8 +97,19 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { @Override public TTLFieldMapper build(BuilderContext context) { + setupFieldType(context); return new TTLFieldMapper(fieldType, enabledState, defaultTTL, ignoreMalformed(context),coerce(context), fieldDataSettings, context.indexSettings()); } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep); + } + + @Override + protected int 
maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -121,18 +136,46 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { } } + public static class TTLFieldType extends LongFieldType { + + public TTLFieldType() { + } + + protected TTLFieldType(TTLFieldType ref) { + super(ref); + } + + @Override + public LongFieldType clone() { + return new TTLFieldType(this); + } + + // Overrides valueForSearch to display live value of remaining ttl + @Override + public Object valueForSearch(Object value) { + long now; + SearchContext searchContext = SearchContext.current(); + if (searchContext != null) { + now = searchContext.nowInMillis(); + } else { + now = System.currentTimeMillis(); + } + long val = value(value); + return val - now; + } + } + private EnabledAttributeMapper enabledState; private long defaultTTL; public TTLFieldMapper(Settings indexSettings) { - this(new FieldType(Defaults.TTL_FIELD_TYPE), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings); + this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings); } - protected TTLFieldMapper(FieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit ignoreMalformed, + protected TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), Defaults.PRECISION_STEP_64_BIT, - Defaults.BOOST, fieldType, false, Defaults.NULL_VALUE, ignoreMalformed, coerce, - null, null, fieldDataSettings, indexSettings, MultiFields.empty(), null); + super(fieldType, false, Defaults.NULL_VALUE, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, MultiFields.empty(), null); this.enabledState = 
enabled; this.defaultTTL = defaultTTL; } @@ -145,20 +188,6 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { return this.defaultTTL; } - // Overrides valueForSearch to display live value of remaining ttl - @Override - public Object valueForSearch(Object value) { - long now; - SearchContext searchContext = SearchContext.current(); - if (searchContext != null) { - now = searchContext.nowInMillis(); - } else { - now = System.currentTimeMillis(); - } - long val = value(value); - return val - now; - } - // Other implementation for realtime get display public Object valueForSearch(long expirationTime) { return expirationTime - System.currentTimeMillis(); @@ -207,7 +236,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now); } // the expiration timestamp (timestamp + ttl) is set as field - fields.add(new CustomLongNumericField(this, expire, fieldType)); + fields.add(new CustomLongNumericField(this, expire, (NumberFieldType)fieldType)); } } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 9437ee3d056..5f068395a03 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.Version; @@ -32,6 +31,9 @@ import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.NumericDateAnalyzer; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -46,7 +48,6 @@ import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.index.mapper.MapperBuilders.timestamp; @@ -63,21 +64,26 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper public static final String NAME = "_timestamp"; // TODO: this should be removed - public static final FieldType PRE_20_FIELD_TYPE; - public static final FieldType FIELD_TYPE = new FieldType(DateFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType PRE_20_FIELD_TYPE; + public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); + public static final DateFieldType FIELD_TYPE = new TimestampFieldType(); static { FIELD_TYPE.setStored(true); FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER); + FIELD_TYPE.setIndexAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Defaults.PRECISION_STEP_64_BIT)); + FIELD_TYPE.setSearchAnalyzer(NumericDateAnalyzer.buildNamedAnalyzer(DATE_TIME_FORMATTER, Integer.MAX_VALUE)); FIELD_TYPE.freeze(); - PRE_20_FIELD_TYPE = new FieldType(FIELD_TYPE); + PRE_20_FIELD_TYPE = FIELD_TYPE.clone(); PRE_20_FIELD_TYPE.setStored(false); PRE_20_FIELD_TYPE.freeze(); } public static final EnabledAttributeMapper 
ENABLED = EnabledAttributeMapper.UNSET_DISABLED; public static final String PATH = null; - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); public static final String DEFAULT_TIMESTAMP = "now"; } @@ -85,13 +91,16 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; private String path = Defaults.PATH; - private FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP; private boolean explicitStore = false; private Boolean ignoreMissing = null; public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); + } + + DateFieldType fieldType() { + return (DateFieldType)fieldType; } public Builder enabled(EnabledAttributeMapper enabledState) { @@ -105,8 +114,8 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper } public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - this.dateTimeFormatter = dateTimeFormatter; - return builder; + fieldType().setDateTimeFormatter(dateTimeFormatter); + return this; } public Builder defaultTimestamp(String defaultTimestamp) { @@ -131,9 +140,20 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper assert fieldType.stored(); fieldType.setStored(false); } - return new TimestampFieldMapper(fieldType, docValues, enabledState, path, dateTimeFormatter, defaultTimestamp, + setupFieldType(context); + return new TimestampFieldMapper(fieldType, docValues, enabledState, path, defaultTimestamp, ignoreMissing, - ignoreMalformed(context), coerce(context), normsLoading, fieldDataSettings, context.indexSettings()); + ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings()); 
+ } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + return NumericDateAnalyzer.buildNamedAnalyzer(fieldType().dateTimeFormatter(), precisionStep); + } + + @Override + protected int maxPrecisionStep() { + return 64; } } @@ -190,7 +210,29 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper } } - private static FieldType defaultFieldType(Settings settings) { + public static class TimestampFieldType extends DateFieldType { + + public TimestampFieldType() {} + + protected TimestampFieldType(TimestampFieldType ref) { + super(ref); + } + + @Override + public DateFieldType clone() { + return new TimestampFieldType(this); + } + + /** + * Override the default behavior to return a timestamp + */ + @Override + public Object valueForSearch(Object value) { + return value(value); + } + } + + private static MappedFieldType defaultFieldType(Settings settings) { return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE; } @@ -198,23 +240,18 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper private final String path; private final String defaultTimestamp; - private final FieldType defaultFieldType; + private final MappedFieldType defaultFieldType; private final Boolean ignoreMissing; public TimestampFieldMapper(Settings indexSettings) { - this(new FieldType(defaultFieldType(indexSettings)), null, Defaults.ENABLED, Defaults.PATH, Defaults.DATE_TIME_FORMATTER, Defaults.DEFAULT_TIMESTAMP, - null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, null, indexSettings); + this(defaultFieldType(indexSettings).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP, + null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings); } - protected TimestampFieldMapper(FieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path, - FormatDateTimeFormatter dateTimeFormatter, String 
defaultTimestamp, - Boolean ignoreMissing, - Explicit ignoreMalformed, Explicit coerce, Loading normsLoading, + protected TimestampFieldMapper(MappedFieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path, + String defaultTimestamp, Boolean ignoreMissing, Explicit ignoreMalformed, Explicit coerce, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), dateTimeFormatter, - Defaults.PRECISION_STEP_64_BIT, Defaults.BOOST, fieldType, docValues, - Defaults.NULL_VALUE, TimeUnit.MILLISECONDS /*always milliseconds*/, - ignoreMalformed, coerce, null, normsLoading, fieldDataSettings, + super(fieldType, docValues, Defaults.NULL_VALUE, ignoreMalformed, coerce, fieldDataSettings, indexSettings, MultiFields.empty(), null); this.enabledState = enabledState; this.path = path; @@ -224,7 +261,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return defaultFieldType; } @@ -249,19 +286,6 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper return this.ignoreMissing; } - @Override - public FormatDateTimeFormatter dateTimeFormatter() { - return this.dateTimeFormatter; - } - - /** - * Override the default behavior to return a timestamp - */ - @Override - public Object valueForSearch(Object value) { - return value(value); - } - @Override public void preParse(ParseContext context) throws IOException { super.parse(context); @@ -281,14 +305,14 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper protected void innerParseCreateField(ParseContext context, List fields) throws IOException { if (enabledState.enabled) { long timestamp = context.sourceToParse().timestamp(); - if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored() && !hasDocValues()) { - 
context.ignoredValue(names.indexName(), String.valueOf(timestamp)); + if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored() && !fieldType().hasDocValues()) { + context.ignoredValue(fieldType.names().indexName(), String.valueOf(timestamp)); } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { - fields.add(new LongFieldMapper.CustomLongNumericField(this, timestamp, fieldType)); + fields.add(new LongFieldMapper.CustomLongNumericField(this, timestamp, (NumberFieldType)fieldType)); } - if (hasDocValues()) { - fields.add(new NumericDocValuesField(names.indexName(), timestamp)); + if (fieldType().hasDocValues()) { + fields.add(new NumericDocValuesField(fieldType.names().indexName(), timestamp)); } } } @@ -306,10 +330,10 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper // if all are defaults, no sense to write it at all if (!includeDefaults && indexed == indexedDefault && customFieldDataSettings == null && - fieldType.stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH - && dateTimeFormatter.format().equals(Defaults.DATE_TIME_FORMATTER.format()) + fieldType.stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH + && fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format()) && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp) - && defaultDocValues() == hasDocValues()) { + && defaultDocValues() == fieldType().hasDocValues()) { return builder; } builder.startObject(CONTENT_TYPE); @@ -326,8 +350,8 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper if (includeDefaults || path != Defaults.PATH) { builder.field("path", path); } - if (includeDefaults || !dateTimeFormatter.format().equals(Defaults.DATE_TIME_FORMATTER.format())) { - builder.field("format", dateTimeFormatter.format()); + if (includeDefaults || 
!fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())) { + builder.field("format", fieldType().dateTimeFormatter().format()); } if (includeDefaults || !Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { builder.field("default", defaultTimestamp); @@ -338,7 +362,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index a6b214f97f9..9128534c468 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; @@ -36,6 +35,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -65,13 +65,16 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Defaults extends AbstractFieldMapper.Defaults { 
public static final String NAME = TypeFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new TypeFieldType(); static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(false); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); } } @@ -79,13 +82,14 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Builder extends AbstractFieldMapper.Builder { public Builder() { - super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); + super(Defaults.NAME, Defaults.FIELD_TYPE); indexName = Defaults.NAME; } @Override public TypeFieldMapper build(BuilderContext context) { - return new TypeFieldMapper(name, indexName, boost, fieldType, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new TypeFieldMapper(fieldType, fieldDataSettings, context.indexSettings()); } } @@ -101,17 +105,53 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public TypeFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null, indexSettings); + public static class TypeFieldType extends MappedFieldType { + + public TypeFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected TypeFieldType(TypeFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new TypeFieldType(this); + } + + @Override + public String value(Object value) { + if (value == null) { + return null; + } + return value.toString(); + } + + @Override + public boolean useTermQueryWithQueryString() { + 
return true; + } + + @Override + public Query termQuery(Object value, @Nullable QueryParseContext context) { + if (indexOptions() == IndexOptions.NONE) { + return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value))))); + } + return new ConstantScoreQuery(new TermQuery(createTerm(value))); + } } - public TypeFieldMapper(String name, String indexName, float boost, FieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), boost, fieldType, false, Lucene.KEYWORD_ANALYZER, - Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + public TypeFieldMapper(Settings indexSettings) { + this(Defaults.FIELD_TYPE.clone(), null, indexSettings); + } + + public TypeFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, false, fieldDataSettings, indexSettings); } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -120,26 +160,6 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { return new FieldDataType("string"); } - @Override - public String value(Object value) { - if (value == null) { - return null; - } - return value.toString(); - } - - @Override - public Query termQuery(Object value, @Nullable QueryParseContext context) { - if (fieldType.indexOptions() == IndexOptions.NONE) { - return new ConstantScoreQuery(new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.typePrefixAsBytes(BytesRefs.toBytesRef(value))))); - } - return new ConstantScoreQuery(new TermQuery(createTerm(value))); - } - - @Override - public boolean useTermQueryWithQueryString() { - return true; - } @Override public void preParse(ParseContext context) throws IOException { @@ -161,9 +181,9 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { if 
(fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) { return; } - fields.add(new Field(names.indexName(), context.type(), fieldType)); - if (hasDocValues()) { - fields.add(new SortedSetDocValuesField(names.indexName(), new BytesRef(context.type()))); + fields.add(new Field(fieldType.names().indexName(), context.type(), fieldType)); + if (fieldType().hasDocValues()) { + fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(context.type()))); } } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index da9f424d9b2..3d49df6089c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; @@ -32,6 +31,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -61,17 +61,20 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { public static class Defaults extends AbstractFieldMapper.Defaults { public static final String NAME = UidFieldMapper.NAME; - public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE); - public static final 
FieldType NESTED_FIELD_TYPE; + public static final MappedFieldType FIELD_TYPE = new UidFieldType(); + public static final MappedFieldType NESTED_FIELD_TYPE; static { FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); FIELD_TYPE.setTokenized(false); FIELD_TYPE.setStored(true); FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); FIELD_TYPE.freeze(); - NESTED_FIELD_TYPE = new FieldType(FIELD_TYPE); + NESTED_FIELD_TYPE = FIELD_TYPE.clone(); NESTED_FIELD_TYPE.setStored(false); NESTED_FIELD_TYPE.freeze(); } @@ -86,7 +89,8 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { @Override public UidFieldMapper build(BuilderContext context) { - return new UidFieldMapper(name, indexName, docValues, fieldDataSettings, context.indexSettings()); + fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + return new UidFieldMapper(fieldType, docValues, fieldDataSettings, context.indexSettings()); } } @@ -102,13 +106,36 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public UidFieldMapper(Settings indexSettings) { - this(Defaults.NAME, Defaults.NAME, null, null, indexSettings); + public static class UidFieldType extends MappedFieldType { + + public UidFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected UidFieldType(UidFieldType ref) { + super(ref); + } + + @Override + public MappedFieldType clone() { + return new UidFieldType(this); + } + + @Override + public Uid value(Object value) { + if (value == null) { + return null; + } + return Uid.createUid(value.toString()); + } } - protected UidFieldMapper(String name, String indexName, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(new Names(name, indexName, indexName, name), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), 
docValuesEnabled(docValues, indexSettings), - Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, fieldDataSettings, indexSettings); + public UidFieldMapper(Settings indexSettings) { + this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings); + } + + protected UidFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) { + super(fieldType, docValuesEnabled(docValues, indexSettings), fieldDataSettings, indexSettings); } static Boolean docValuesEnabled(Boolean docValues, Settings indexSettings) { @@ -119,7 +146,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -171,21 +198,13 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { Field uid = new Field(NAME, Uid.createUid(context.stringBuilder(), context.type(), context.id()), Defaults.FIELD_TYPE); context.uid(uid); fields.add(uid); - if (hasDocValues()) { + if (fieldType().hasDocValues()) { fields.add(new BinaryDocValuesField(NAME, new BytesRef(uid.stringValue()))); } } - @Override - public Uid value(Object value) { - if (value == null) { - return null; - } - return Uid.createUid(value.toString()); - } - public Term term(String uid) { - return createTerm(uid); + return new Term(fieldType().names().indexName(), fieldType().indexedValueForSearch(uid)); } @Override @@ -210,7 +229,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { if (customFieldDataSettings != null) { builder.field("fielddata", (Map) customFieldDataSettings.getAsMap()); } else if (includeDefaults) { - builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap()); + builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap()); } builder.endObject(); diff --git 
a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index 88b0074b634..f4a33b80b5c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -20,13 +20,14 @@ package org.elasticsearch.index.mapper.internal; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.index.DocValuesType; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -52,9 +53,13 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe public static class Defaults { public static final String NAME = VersionFieldMapper.NAME; - public static final float BOOST = 1.0f; - public static final FieldType FIELD_TYPE = NumericDocValuesField.TYPE; + public static final MappedFieldType FIELD_TYPE = new VersionFieldType(); + static { + FIELD_TYPE.setNames(new MappedFieldType.Names(NAME)); + FIELD_TYPE.setDocValuesType(DocValuesType.NUMERIC); + FIELD_TYPE.freeze(); + } } public static class Builder extends Mapper.Builder { @@ -86,6 +91,31 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe } } + public static class VersionFieldType extends MappedFieldType { + + public VersionFieldType() { + super(AbstractFieldMapper.Defaults.FIELD_TYPE); + } + + protected VersionFieldType(VersionFieldType ref) { + super(ref); + 
} + + @Override + public MappedFieldType clone() { + return new VersionFieldType(this); + } + + @Override + public Long value(Object value) { + if (value == null || (value instanceof Long)) { + return (Long) value; + } else { + return Long.parseLong(value.toString()); + } + } + } + private final ThreadLocal fieldCache = new ThreadLocal() { @Override protected Field initialValue() { @@ -94,7 +124,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe }; public VersionFieldMapper(Settings indexSettings) { - super(new Names(NAME, NAME, NAME, NAME), Defaults.BOOST, Defaults.FIELD_TYPE, true, null, null, null, null, null, indexSettings); + super(Defaults.FIELD_TYPE, true, null, indexSettings); } @Override @@ -116,15 +146,6 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe return null; } - @Override - public Long value(Object value) { - if (value == null || (value instanceof Long)) { - return (Long) value; - } else { - return Long.parseLong(value.toString()); - } - } - @Override public void postParse(ParseContext context) throws IOException { // In the case of nested docs, let's fill nested docs with version=1 so that Lucene doesn't write a Bitset for documents @@ -136,7 +157,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } diff --git a/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 0fa74b0ab8a..8cf6b93f5ca 100644 --- a/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -20,10 +20,8 @@ package org.elasticsearch.index.mapper.ip; import com.google.common.net.InetAddresses; - import org.apache.lucene.analysis.NumericTokenStream; import 
org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.NumericRangeQuery; @@ -43,6 +41,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericAnalyzer; import org.elasticsearch.index.analysis.NumericTokenizer; import org.elasticsearch.index.fielddata.FieldDataType; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -51,7 +50,6 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.Iterator; @@ -101,7 +99,7 @@ public class IpFieldMapper extends NumberFieldMapper { public static class Defaults extends NumberFieldMapper.Defaults { public static final String NULL_VALUE = null; - public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); + public static final MappedFieldType FIELD_TYPE = new IpFieldType(); static { FIELD_TYPE.freeze(); @@ -113,7 +111,7 @@ public class IpFieldMapper extends NumberFieldMapper { protected String nullValue = Defaults.NULL_VALUE; public Builder(String name) { - super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT); + super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); builder = this; } @@ -124,13 +122,23 @@ public class IpFieldMapper extends NumberFieldMapper { @Override public IpFieldMapper build(BuilderContext context) { - fieldType.setOmitNorms(fieldType.omitNorms() && boost == 
1.0f); - IpFieldMapper fieldMapper = new IpFieldMapper(buildNames(context), - fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), - similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + setupFieldType(context); + IpFieldMapper fieldMapper = new IpFieldMapper(fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), + fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } + + @Override + protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) { + String name = precisionStep == Integer.MAX_VALUE ? "_ip/max" : ("_ip/" + precisionStep); + return new NamedAnalyzer(name, new NumericIpAnalyzer(precisionStep)); + } + + @Override + protected int maxPrecisionStep() { + return 64; + } } public static class TypeParser implements Mapper.TypeParser { @@ -154,21 +162,90 @@ public class IpFieldMapper extends NumberFieldMapper { } } + public static class IpFieldType extends NumberFieldType { + + public IpFieldType() {} + + protected IpFieldType(IpFieldType ref) { + super(ref); + } + + @Override + public NumberFieldType clone() { + return new IpFieldType(this); + } + + + @Override + public Long value(Object value) { + if (value == null) { + return null; + } + if (value instanceof Number) { + return ((Number) value).longValue(); + } + if (value instanceof BytesRef) { + return Numbers.bytesToLong((BytesRef) value); + } + return ipToLong(value.toString()); + } + + /** + * IPs should return as a string. 
+ */ + @Override + public Object valueForSearch(Object value) { + Long val = value(value); + if (val == null) { + return null; + } + return longToIp(val); + } + + @Override + public BytesRef indexedValueForSearch(Object value) { + BytesRefBuilder bytesRef = new BytesRefBuilder(); + NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match + return bytesRef.get(); + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + lowerTerm == null ? null : parseValue(lowerTerm), + upperTerm == null ? null : parseValue(upperTerm), + includeLower, includeUpper); + } + + @Override + public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { + long iValue = ipToLong(value); + long iSim; + try { + iSim = ipToLong(fuzziness.asString()); + } catch (IllegalArgumentException e) { + iSim = fuzziness.asLong(); + } + return NumericRangeQuery.newLongRange(names().indexName(), numericPrecisionStep(), + iValue - iSim, + iValue + iSim, + true, true); + } + } + private String nullValue; - protected IpFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, + protected IpFieldMapper(MappedFieldType fieldType, Boolean docValues, String nullValue, Explicit ignoreMalformed, Explicit coerce, - SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, + @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, precisionStep, boost, fieldType, docValues, - ignoreMalformed, coerce, new NamedAnalyzer("_ip/" + precisionStep, new NumericIpAnalyzer(precisionStep)), - new NamedAnalyzer("_ip/max", new NumericIpAnalyzer(Integer.MAX_VALUE)), - similarity, normsLoading, fieldDataSettings, 
indexSettings, multiFields, copyTo); + super(fieldType, docValues, ignoreMalformed, coerce, + fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -177,45 +254,7 @@ public class IpFieldMapper extends NumberFieldMapper { return new FieldDataType("long"); } - @Override - protected int maxPrecisionStep() { - return 64; - } - - @Override - public Long value(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToLong((BytesRef) value); - } - return ipToLong(value.toString()); - } - - /** - * IPs should return as a string. - */ - @Override - public Object valueForSearch(Object value) { - Long val = value(value); - if (val == null) { - return null; - } - return longToIp(val); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - NumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - private long parseValue(Object value) { + private static long parseValue(Object value) { if (value instanceof Number) { return ((Number) value).longValue(); } @@ -225,29 +264,6 @@ public class IpFieldMapper extends NumberFieldMapper { return ipToLong(value.toString()); } - @Override - public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - long iValue = ipToLong(value); - long iSim; - try { - iSim = ipToLong(fuzziness.asString()); - } catch (IllegalArgumentException e) { - iSim = fuzziness.asLong(); - } - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - iValue - iSim, - iValue + iSim, - true, true); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object 
upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { - return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? null : parseValue(upperTerm), - includeLower, includeUpper); - } - @Override public Query nullValueFilter() { if (nullValue == null) { @@ -276,16 +292,16 @@ public class IpFieldMapper extends NumberFieldMapper { return; } if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(names.fullName(), ipAsString, boost); + context.allEntries().addText(fieldType.names().fullName(), ipAsString, fieldType.boost()); } final long value = ipToLong(ipAsString); if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType); - field.setBoost(boost); + field.setBoost(fieldType.boost()); fields.add(field); } - if (hasDocValues()) { + if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @@ -310,8 +326,8 @@ public class IpFieldMapper extends NumberFieldMapper { protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", precisionStep); + if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { + builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); diff --git a/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java b/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java index ca0aed86f8e..d5e4c95b54c 100644 --- a/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java +++ 
b/src/main/java/org/elasticsearch/index/query/CommonTermsQueryParser.java @@ -165,7 +165,7 @@ public class CommonTermsQueryParser implements QueryParser { String field; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - field = mapper.names().indexName(); + field = mapper.fieldType().names().indexName(); } else { field = fieldName; } @@ -173,7 +173,7 @@ public class CommonTermsQueryParser implements QueryParser { Analyzer analyzer = null; if (queryAnalyzer == null) { if (mapper != null) { - analyzer = mapper.searchAnalyzer(); + analyzer = mapper.fieldType().searchAnalyzer(); } if (analyzer == null && mapper != null) { analyzer = parseContext.getSearchAnalyzer(mapper); diff --git a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java index fe981193805..0cf97b4cbe3 100644 --- a/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java @@ -98,7 +98,7 @@ public class ExistsQueryParser implements QueryParser { if (fieldNamesMapper!= null && fieldNamesMapper.enabled()) { final String f; if (mapper != null) { - f = mapper.names().indexName(); + f = mapper.fieldType().names().indexName(); } else { f = field; } diff --git a/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java b/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java index 1e8fd7cfa03..1a2e6608dec 100644 --- a/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/FieldMaskingSpanQueryParser.java @@ -92,7 +92,7 @@ public class FieldMaskingSpanQueryParser implements QueryParser { FieldMapper mapper = parseContext.fieldMapper(field); if (mapper != null) { - field = mapper.names().indexName(); + field = mapper.fieldType().names().indexName(); } FieldMaskingSpanQuery query = new 
FieldMaskingSpanQuery(inner, field); diff --git a/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java b/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java index 3a63ae68295..10a766ac068 100644 --- a/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java @@ -150,9 +150,9 @@ public class GeoShapeQueryParser implements QueryParser { GeoShapeFieldMapper shapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = shapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = shapeFieldMapper.fieldType().defaultStrategy(); if (strategyName != null) { - strategy = shapeFieldMapper.resolveStrategy(strategyName); + strategy = shapeFieldMapper.fieldType().resolveStrategy(strategyName); } Query query; if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) { diff --git a/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java index 08b38453dc6..363303181ac 100644 --- a/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ b/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; @@ -71,11 +72,11 @@ public class GeohashCellQuery { * @return a new GeoBoundinboxfilter */ public static Query create(QueryParseContext context, GeoPointFieldMapper fieldMapper, String geohash, @Nullable List geohashes) { - if (fieldMapper.geoHashStringMapper() == null) { + 
MappedFieldType geoHashMapper = fieldMapper.fieldType().geohashFieldType(); + if (geoHashMapper == null) { throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled"); } - StringFieldMapper geoHashMapper = fieldMapper.geoHashStringMapper(); if (geohashes == null || geohashes.size() == 0) { return geoHashMapper.termQuery(geohash, context); } else { @@ -246,7 +247,7 @@ public class GeohashCellQuery { } GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper) mapper); - if (!geoMapper.isEnableGeohashPrefix()) { + if (!geoMapper.fieldType().isGeohashPrefixEnabled()) { throw new QueryParsingException(parseContext, "can't execute geohash_cell on field [" + fieldName + "], geohash_prefix is not enabled"); } diff --git a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java index 16596b6bdfb..7c7ae54aff5 100644 --- a/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java @@ -116,7 +116,7 @@ public class MissingQueryParser implements QueryParser { if (fieldNamesMapper != null && fieldNamesMapper.enabled()) { final String f; if (mapper != null) { - f = mapper.names().indexName(); + f = mapper.fieldType().names().indexName(); } else { f = field; } diff --git a/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java b/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java index fe1c99e421a..b33da0994c5 100644 --- a/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java @@ -167,7 +167,7 @@ public class MoreLikeThisQueryParser implements QueryParser { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { String field = parser.text(); FieldMapper mapper = parseContext.fieldMapper(field); - moreLikeFields.add(mapper == null ? 
field : mapper.names().indexName()); + moreLikeFields.add(mapper == null ? field : mapper.fieldType().names().indexName()); } } else if (Fields.DOCUMENT_IDS.match(currentFieldName, parseContext.parseFlags())) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { diff --git a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index e2be1229fa6..f279efa5052 100644 --- a/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -287,8 +287,8 @@ public class QueryParseContext { * TODO: remove this by moving defaults into mappers themselves */ public Analyzer getSearchAnalyzer(FieldMapper mapper) { - if (mapper.searchAnalyzer() != null) { - return mapper.searchAnalyzer(); + if (mapper.fieldType().searchAnalyzer() != null) { + return mapper.fieldType().searchAnalyzer(); } return mapperService().searchAnalyzer(); } @@ -297,8 +297,8 @@ public class QueryParseContext { * TODO: remove this by moving defaults into mappers themselves */ public Analyzer getSearchQuoteAnalyzer(FieldMapper mapper) { - if (mapper.searchQuoteAnalyzer() != null) { - return mapper.searchQuoteAnalyzer(); + if (mapper.fieldType().searchQuoteAnalyzer() != null) { + return mapper.fieldType().searchQuoteAnalyzer(); } return mapperService().searchQuoteAnalyzer(); } diff --git a/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java b/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java index e86ffebde31..fed95b9eb02 100644 --- a/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java @@ -128,7 +128,7 @@ public class RangeQueryParser implements QueryParser { "[range] time_zone when using ms since epoch format as it's UTC based can not be applied to [" + fieldName + "]"); } - query = ((DateFieldMapper) 
mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext); + query = ((DateFieldMapper) mapper).fieldType().rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext); } else { if (timeZone != null) { throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non date field [" diff --git a/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java b/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java index d48c5b7163f..ba70aa89bde 100644 --- a/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java +++ b/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java @@ -132,7 +132,7 @@ public class SimpleQueryStringParser implements QueryParser { } else { FieldMapper mapper = parseContext.fieldMapper(fField); if (mapper != null) { - fieldsAndWeights.put(mapper.names().indexName(), fBoost); + fieldsAndWeights.put(mapper.fieldType().names().indexName(), fBoost); } else { fieldsAndWeights.put(fField, fBoost); } diff --git a/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java b/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java index b1b3cbf5fbf..1b4e8c3af0e 100644 --- a/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/SpanTermQueryParser.java @@ -95,7 +95,7 @@ public class SpanTermQueryParser implements QueryParser { BytesRef valueBytes = null; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - fieldName = mapper.names().indexName(); + fieldName = mapper.fieldType().names().indexName(); valueBytes = mapper.indexedValueForSearch(value); } if (valueBytes == null) { diff --git a/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java b/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java index f4170292251..1da9b5f748e 100644 --- 
a/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java @@ -160,7 +160,7 @@ public class TermsQueryParser implements QueryParser { FieldMapper fieldMapper = parseContext.fieldMapper(fieldName); if (fieldMapper != null) { - fieldName = fieldMapper.names().indexName(); + fieldName = fieldMapper.fieldType().names().indexName(); } if (lookupId != null) { diff --git a/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java b/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java index 36ca202173e..be664f04f6b 100644 --- a/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/WildcardQueryParser.java @@ -95,7 +95,7 @@ public class WildcardQueryParser implements QueryParser { BytesRef valueBytes; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - fieldName = mapper.names().indexName(); + fieldName = mapper.fieldType().names().indexName(); valueBytes = mapper.indexedValueForSearch(value); } else { valueBytes = new BytesRef(value); diff --git a/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java b/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java index 375920a7fb0..b5a8363740d 100644 --- a/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java +++ b/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionParser.java @@ -263,7 +263,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser { } long origin = SearchContext.current().nowInMillis(); if (originString != null) { - origin = dateFieldMapper.parseToMilliseconds(originString); + origin = dateFieldMapper.fieldType().parseToMilliseconds(originString, false, null, null); } if (scaleString == null) { diff --git a/src/main/java/org/elasticsearch/index/search/MatchQuery.java 
b/src/main/java/org/elasticsearch/index/search/MatchQuery.java index b2b747a9c92..344c9d3b29d 100644 --- a/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -157,7 +157,7 @@ public class MatchQuery { final String field; FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { - field = mapper.names().indexName(); + field = mapper.fieldType().names().indexName(); } else { field = fieldName; } diff --git a/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index ea9666f0746..a31466a7dc6 100644 --- a/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -165,7 +165,7 @@ public class MultiMatchQuery extends MatchQuery { FieldMapper mapper = parseContext.fieldMapper(name); if (mapper != null) { Analyzer actualAnalyzer = getAnalyzer(mapper); - name = mapper.names().indexName(); + name = mapper.fieldType().names().indexName(); if (!groups.containsKey(actualAnalyzer)) { groups.put(actualAnalyzer, new ArrayList()); } diff --git a/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java b/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java index cb1befd76c7..bf1ba4578b2 100644 --- a/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java +++ b/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java @@ -31,7 +31,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; public class IndexedGeoBoundingBoxQuery { public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) { - if (!fieldMapper.isEnableLatLon()) { + if (!fieldMapper.fieldType().isLatLonEnabled()) { throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + 
fieldMapper.name() + "], can't use indexed filter on it"); } //checks to see if bounding box crosses 180 degrees @@ -45,16 +45,16 @@ public class IndexedGeoBoundingBoxQuery { private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) { BooleanQuery filter = new BooleanQuery(); filter.setMinimumNumberShouldMatch(1); - filter.add(fieldMapper.lonMapper().rangeFilter(null, bottomRight.lon(), true, true), Occur.SHOULD); - filter.add(fieldMapper.lonMapper().rangeFilter(topLeft.lon(), null, true, true), Occur.SHOULD); - filter.add(fieldMapper.latMapper().rangeFilter(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST); + filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, null), Occur.SHOULD); + filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), null, true, true, null), Occur.SHOULD); + filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST); return new ConstantScoreQuery(filter); } private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper fieldMapper) { BooleanQuery filter = new BooleanQuery(); - filter.add(fieldMapper.lonMapper().rangeFilter(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST); - filter.add(fieldMapper.latMapper().rangeFilter(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST); + filter.add(fieldMapper.fieldType().lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, null), Occur.MUST); + filter.add(fieldMapper.fieldType().latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, null), Occur.MUST); return new ConstantScoreQuery(filter); } } diff --git a/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 7c31ca2c8fd..c3232085130 100644 --- 
a/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -100,7 +100,7 @@ public class SimilarityService extends AbstractIndexComponent { @Override public Similarity get(String name) { FieldMapper mapper = mapperService.smartNameFieldMapper(name); - return (mapper != null && mapper.similarity() != null) ? mapper.similarity().get() : defaultSimilarity; + return (mapper != null && mapper.fieldType().similarity() != null) ? mapper.fieldType().similarity().get() : defaultSimilarity; } } } diff --git a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java b/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java index 9ca66a65ec7..ec160dba1ec 100644 --- a/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java +++ b/src/main/java/org/elasticsearch/index/termvectors/ShardTermVectorsService.java @@ -236,7 +236,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent { if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) { analyzer = mapperService.analysisService().analyzer(perFieldAnalyzer.get(field).toString()); } else { - analyzer = mapperService.smartNameFieldMapper(field).indexAnalyzer(); + analyzer = mapperService.smartNameFieldMapper(field).fieldType().indexAnalyzer(); } if (analyzer == null) { analyzer = mapperService.analysisService().defaultIndexAnalyzer(); diff --git a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 7547814d791..4c4233ff2c0 100644 --- a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -37,6 +37,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import 
org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; import org.elasticsearch.index.shard.IndexShard; @@ -99,7 +100,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL this.closed = true; } - public IndexFieldDataCache buildIndexFieldDataCache(IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) { + public IndexFieldDataCache buildIndexFieldDataCache(IndexService indexService, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType) { return new IndexFieldCache(logger, cache, indicesFieldDataCacheListener, indexService, index, fieldNames, fieldDataType); } @@ -139,12 +140,12 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL private final ESLogger logger; private final IndexService indexService; final Index index; - final FieldMapper.Names fieldNames; + final MappedFieldType.Names fieldNames; final FieldDataType fieldDataType; private final Cache cache; private final IndicesFieldDataCacheListener indicesFieldDataCacheListener; - IndexFieldCache(ESLogger logger,final Cache cache, IndicesFieldDataCacheListener indicesFieldDataCacheListener, IndexService indexService, Index index, FieldMapper.Names fieldNames, FieldDataType fieldDataType) { + IndexFieldCache(ESLogger logger,final Cache cache, IndicesFieldDataCacheListener indicesFieldDataCacheListener, IndexService indexService, Index index, MappedFieldType.Names fieldNames, FieldDataType fieldDataType) { this.logger = logger; this.indexService = indexService; this.index = index; diff --git a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java 
b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java index c9b5f653101..eff99a26e1a 100644 --- a/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java +++ b/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCacheListener.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.indices.breaker.CircuitBreakerService; /** @@ -43,13 +44,14 @@ public class IndicesFieldDataCacheListener implements IndexFieldDataCache.Listen } @Override - public void onLoad(FieldMapper.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) { + public void onLoad(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, Accountable fieldData) { } @Override - public void onUnload(FieldMapper.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { + public void onUnload(MappedFieldType.Names fieldNames, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) { assert sizeInBytes >= 0 : "When reducing circuit breaker, it should be adjusted with a number higher or equal to 0 and not [" + sizeInBytes + "]"; circuitBreakerService.getBreaker(CircuitBreaker.FIELDDATA).addWithoutBreaking(-sizeInBytes); } } + diff --git a/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java b/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java index 165193d35f0..3233cdcd756 100644 --- a/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java +++ b/src/main/java/org/elasticsearch/percolator/SingleDocumentPercolatorIndex.java @@ -60,8 +60,8 @@ class SingleDocumentPercolatorIndex implements PercolatorIndex { if (tokenStream != 
null) { memoryIndex.addField(field.name(), tokenStream, field.boost()); } - } catch (IOException e) { - throw new ElasticsearchException("Failed to create token stream", e); + } catch (Exception e) { + throw new ElasticsearchException("Failed to create token stream for [" + field.name() + "]", e); } } context.initialize(new DocEngineSearcher(memoryIndex), parsedDocument); diff --git a/src/main/java/org/elasticsearch/search/SearchService.java b/src/main/java/org/elasticsearch/search/SearchService.java index 15a691f3360..025ac1f6ec9 100644 --- a/src/main/java/org/elasticsearch/search/SearchService.java +++ b/src/main/java/org/elasticsearch/search/SearchService.java @@ -59,7 +59,7 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.TemplateQueryParser; import org.elasticsearch.index.search.stats.StatsGroupsParseElement; @@ -840,8 +840,12 @@ public class SearchService extends AbstractLifecycleComponent { final ObjectSet warmUp = new ObjectHashSet<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final String indexName = fieldMapper.names().indexName(); - if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && !fieldMapper.fieldType().omitNorms() && fieldMapper.normsLoading(defaultLoading) == Loading.EAGER) { + final String indexName = fieldMapper.fieldType().names().indexName(); + Loading normsLoading = fieldMapper.fieldType().normsLoading(); + if (normsLoading == null) { + normsLoading = defaultLoading; + } + if (fieldMapper.fieldType().indexOptions() != IndexOptions.NONE && 
!fieldMapper.fieldType().omitNorms() && normsLoading == Loading.EAGER) { warmUp.add(indexName); } } @@ -896,7 +900,7 @@ public class SearchService extends AbstractLifecycleComponent { final Map warmUp = new HashMap<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final FieldDataType fieldDataType = fieldMapper.fieldDataType(); + final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType(); if (fieldDataType == null) { continue; } @@ -904,7 +908,7 @@ public class SearchService extends AbstractLifecycleComponent { continue; } - final String indexName = fieldMapper.names().indexName(); + final String indexName = fieldMapper.fieldType().names().indexName(); if (warmUp.containsKey(indexName)) { continue; } @@ -924,10 +928,10 @@ public class SearchService extends AbstractLifecycleComponent { final long start = System.nanoTime(); indexFieldDataService.getForField(fieldMapper).load(ctx); if (indexShard.warmerService().logger().isTraceEnabled()) { - indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); + indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); } } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.names().fullName()); + indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldMapper.fieldType().names().fullName()); } finally { latch.countDown(); } @@ -950,14 +954,14 @@ public class SearchService extends AbstractLifecycleComponent { final Map warmUpGlobalOrdinals = new HashMap<>(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { for (FieldMapper fieldMapper : docMapper.mappers()) { - final FieldDataType 
fieldDataType = fieldMapper.fieldDataType(); + final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType(); if (fieldDataType == null) { continue; } if (fieldDataType.getLoading() != Loading.EAGER_GLOBAL_ORDINALS) { continue; } - final String indexName = fieldMapper.names().indexName(); + final String indexName = fieldMapper.fieldType().names().indexName(); if (warmUpGlobalOrdinals.containsKey(indexName)) { continue; } @@ -976,10 +980,10 @@ public class SearchService extends AbstractLifecycleComponent { IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldMapper); ifd.loadGlobal(context.reader()); if (indexShard.warmerService().logger().isTraceEnabled()) { - indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); + indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldMapper.fieldType().names().fullName(), TimeValue.timeValueNanos(System.nanoTime() - start)); } } catch (Throwable t) { - indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.names().fullName()); + indexShard.warmerService().logger().warn("failed to warm-up global ordinals for [{}]", t, fieldMapper.fieldType().names().fullName()); } finally { latch.countDown(); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java b/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java index c3a427c1b48..f8c42c36845 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java +++ b/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ChildrenParser.java @@ -89,7 +89,7 @@ public class ChildrenParser implements Aggregator.Parser { parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter()); childFilter = new 
QueryWrapperFilter(childDocMapper.typeFilter()); ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper); - config.fieldContext(new FieldContext(parentFieldMapper.names().indexName(), parentChildIndexFieldData, parentFieldMapper)); + config.fieldContext(new FieldContext(parentFieldMapper.fieldType().names().indexName(), parentChildIndexFieldData, parentFieldMapper)); } else { config.unmapped(true); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java b/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java index 4591e8392e2..f817c7d79fa 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/AggregationContext.java @@ -105,7 +105,7 @@ public class AggregationContext { if (config.fieldContext != null && config.fieldContext.mapper() instanceof DateFieldMapper) { final DateFieldMapper mapper = (DateFieldMapper) config.fieldContext.mapper(); try { - missing = mapper.dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis(); + missing = mapper.fieldType().dateTimeFormatter().parser().parseDateTime(config.missing.toString()).getMillis(); } catch (IllegalArgumentException e) { throw new SearchParseException(context, "Expected a date value in [missing] but got [" + config.missing + "]", null, e); } diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java index d9b01bb8f69..7efef92364c 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java @@ -72,7 +72,7 @@ public class ValueFormat { } public static DateTime mapper(DateFieldMapper mapper) { - return new 
DateTime(mapper.dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper)); + return new DateTime(mapper.fieldType().dateTimeFormatter().format(), ValueFormatter.DateTime.mapper(mapper), ValueParser.DateMath.mapper(mapper)); } public DateTime(String pattern, ValueFormatter formatter, ValueParser parser) { diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java index f3a47e271a1..0a06b27afd6 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormatter.java @@ -105,7 +105,7 @@ public interface ValueFormatter extends Streamable { private DateTimeZone timeZone = DateTimeZone.UTC; public static DateTime mapper(DateFieldMapper mapper) { - return new DateTime(mapper.dateTimeFormatter()); + return new DateTime(mapper.fieldType().dateTimeFormatter()); } static final byte ID = 2; diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java index 4314e95b710..ccc57c34288 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java @@ -110,7 +110,7 @@ public interface ValueParser { } public static DateMath mapper(DateFieldMapper mapper) { - return new DateMath(new DateMathParser(mapper.dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT)); + return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT)); } } diff --git a/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 66f4bf991af..9afa3f5fe35 
100644 --- a/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -152,7 +152,7 @@ public class FetchPhase implements SearchPhase { if (fieldNames == null) { fieldNames = new HashSet<>(); } - fieldNames.add(mapper.names().indexName()); + fieldNames.add(mapper.fieldType().names().indexName()); } else { if (extractFieldNames == null) { extractFieldNames = newArrayList(); diff --git a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java index c997624ff60..55ee8f9c21e 100644 --- a/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/FastVectorHighlighter.java @@ -147,10 +147,10 @@ public class FastVectorHighlighter implements Highlighter { // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible // Only send matched fields if they were requested to save time. 
if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.names().indexName(), field.fieldOptions().matchedFields(), fragmentCharSize, + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } else { - fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.names().indexName(), fragmentCharSize, + fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); } @@ -163,7 +163,7 @@ public class FastVectorHighlighter implements Highlighter { // Essentially we just request that a fragment is built from 0 to noMatchSize using the normal fragmentsBuilder FieldFragList fieldFragList = new SimpleFieldFragList(-1 /*ignored*/); fieldFragList.add(0, noMatchSize, Collections.emptyList()); - fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.names().indexName(), + fragments = entry.fragmentsBuilder.createFragments(hitContext.reader(), hitContext.docId(), mapper.fieldType().names().indexName(), fieldFragList, 1, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder); if (fragments != null && fragments.length > 0) { return new HighlightField(highlighterContext.fieldName, StringText.convertFromStringArray(fragments)); diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java 
b/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java index acbba6749cf..3358aec0ed7 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightUtils.java @@ -47,9 +47,9 @@ public final class HighlightUtils { boolean forceSource = searchContext.highlight().forceSource(field); List textsToHighlight; if (!forceSource && mapper.fieldType().stored()) { - CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(ImmutableSet.of(mapper.names().indexName()), false); + CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(ImmutableSet.of(mapper.fieldType().names().indexName()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); - textsToHighlight = fieldVisitor.fields().get(mapper.names().indexName()); + textsToHighlight = fieldVisitor.fields().get(mapper.fieldType().names().indexName()); if (textsToHighlight == null) { // Can happen if the document doesn't have the field to highlight textsToHighlight = ImmutableList.of(); @@ -57,7 +57,7 @@ public final class HighlightUtils { } else { SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId()); - textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName())); + textsToHighlight = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); } assert textsToHighlight != null; return textsToHighlight; diff --git a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java index 460b2df05cd..86a50547cbc 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java +++ b/src/main/java/org/elasticsearch/search/highlight/PlainHighlighter.java @@ -69,7 +69,7 @@ public class PlainHighlighter implements Highlighter { 
org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper); if (entry == null) { - QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.names().indexName() : null); + QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.fieldType().names().indexName() : null); queryScorer.setExpandMultiTermQuery(true); Fragmenter fragmenter; if (field.fieldOptions().numberOfFragments() == 0) { @@ -105,7 +105,7 @@ public class PlainHighlighter implements Highlighter { for (Object textToHighlight : textsToHighlight) { String text = textToHighlight.toString(); - TokenStream tokenStream = analyzer.tokenStream(mapper.names().indexName(), text); + TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().names().indexName(), text); if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { // can't perform highlighting if the stream has no terms (binary token stream) or no offsets continue; @@ -161,7 +161,7 @@ public class PlainHighlighter implements Highlighter { String fieldContents = textsToHighlight.get(0).toString(); int end; try { - end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer.tokenStream(mapper.names().indexName(), fieldContents)); + end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer.tokenStream(mapper.fieldType().names().indexName(), fieldContents)); } catch (Exception e) { throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + highlighterContext.fieldName + "]", e); } diff --git a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java index dcbb810d4dd..35f6560899e 100644 --- a/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java +++ 
b/src/main/java/org/elasticsearch/search/highlight/PostingsHighlighter.java @@ -91,7 +91,7 @@ public class PostingsHighlighter implements Highlighter { } IndexSearcher searcher = new IndexSearcher(hitContext.reader()); - Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); + Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().names().indexName(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments); for (Snippet fieldSnippet : fieldSnippets) { if (Strings.hasText(fieldSnippet.getText())) { snippets.add(fieldSnippet); diff --git a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java index 3d338277033..ee824ee13c3 100644 --- a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java +++ b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/FragmentBuilderHelper.java @@ -48,8 +48,8 @@ public final class FragmentBuilderHelper { */ public static WeightedFragInfo fixWeightedFragInfo(FieldMapper mapper, Field[] values, WeightedFragInfo fragInfo) { assert fragInfo != null : "FragInfo must not be null"; - assert mapper.names().indexName().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name(); - if (!fragInfo.getSubInfos().isEmpty() && (containsBrokenAnalysis(mapper.indexAnalyzer()))) { + assert mapper.fieldType().names().indexName().equals(values[0].name()) : "Expected FieldMapper for field " + values[0].name(); + if (!fragInfo.getSubInfos().isEmpty() && (containsBrokenAnalysis(mapper.fieldType().indexAnalyzer()))) { /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time * which can potentially mess up the offsets. 
To prevent a SAIIOBException we need to resort * the fragments based on their offsets rather than using soley the positions as it is done in diff --git a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java index b7670cb08ec..71393fdf190 100644 --- a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java +++ b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceScoreOrderFragmentsBuilder.java @@ -60,10 +60,10 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName())); + List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(mapper.names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } diff --git a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java index c52312f5ac0..7a12b449c97 100644 --- a/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java +++ b/src/main/java/org/elasticsearch/search/highlight/vectorhighlight/SourceSimpleFragmentsBuilder.java @@ -56,13 +56,13 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder { SourceLookup sourceLookup = 
searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); - List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.names().fullName())); + List values = sourceLookup.extractRawValues(hitContext.getSourcePath(mapper.fieldType().names().fullName())); if (values.isEmpty()) { return EMPTY_FIELDS; } Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { - fields[i] = new Field(mapper.names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); + fields[i] = new Field(mapper.fieldType().names().indexName(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; } diff --git a/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java b/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java index 3cc9b9e1fd7..e634dff6a2f 100644 --- a/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java +++ b/src/main/java/org/elasticsearch/search/lookup/FieldLookup.java @@ -85,7 +85,7 @@ public class FieldLookup { } valueLoaded = true; value = null; - List values = fields.get(mapper.names().indexName()); + List values = fields.get(mapper.fieldType().names().indexName()); return values != null ? 
value = values.get(0) : null; } @@ -95,6 +95,6 @@ public class FieldLookup { } valuesLoaded = true; values.clear(); - return values = fields().get(mapper.names().indexName()); + return values = fields().get(mapper.fieldType().names().indexName()); } } diff --git a/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java index 52e0872742a..9936372c9b8 100644 --- a/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java +++ b/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java @@ -144,12 +144,12 @@ public class LeafFieldsLookup implements Map { cachedFieldData.put(name, data); } if (data.fields() == null) { - String fieldName = data.mapper().names().indexName(); + String fieldName = data.mapper().fieldType().names().indexName(); fieldVisitor.reset(fieldName); try { reader.document(docId, fieldVisitor); fieldVisitor.postProcess(data.mapper()); - data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.mapper().names().indexName()))); + data.fields(ImmutableMap.of(name, fieldVisitor.fields().get(data.mapper().fieldType().names().indexName()))); } catch (IOException e) { throw new ElasticsearchParseException("failed to load field [" + name + "]", e); } diff --git a/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/src/main/java/org/elasticsearch/search/sort/SortParseElement.java index 3d97b788825..6d1cc200f8a 100644 --- a/src/main/java/org/elasticsearch/search/sort/SortParseElement.java +++ b/src/main/java/org/elasticsearch/search/sort/SortParseElement.java @@ -268,7 +268,7 @@ public class SortParseElement implements SearchParseElement { IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldMapper) .comparatorSource(missing, sortMode, nested); - sortFields.add(new SortField(fieldMapper.names().indexName(), fieldComparatorSource, reverse)); + sortFields.add(new 
SortField(fieldMapper.fieldType().names().indexName(), fieldComparatorSource, reverse)); } } diff --git a/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java b/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java index b95d7200d08..879b51a6fe2 100644 --- a/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java +++ b/src/main/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProvider.java @@ -253,7 +253,7 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider return new LookupFactory() { @Override public Lookup getLookup(CompletionFieldMapper mapper, CompletionSuggestionContext suggestionContext) { - AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.names().indexName()); + AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.fieldType().names().indexName()); if (analyzingSuggestHolder == null) { return null; } @@ -263,19 +263,19 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider final Automaton queryPrefix = mapper.requiresContext() ? 
ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(), suggestionContext.getContextQueries()) : null; if (suggestionContext.isFuzzy()) { - suggester = new XFuzzySuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), - suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), suggestionContext.isFuzzyUnicodeAware(), - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, - analyzingSuggestHolder.holeCharacter); + suggester = new XFuzzySuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), + suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), suggestionContext.isFuzzyUnicodeAware(), + analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, + analyzingSuggestHolder.holeCharacter); } else { - suggester = new XAnalyzingSuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, 
analyzingSuggestHolder.endByte, - analyzingSuggestHolder.holeCharacter); + suggester = new XAnalyzingSuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, + analyzingSuggestHolder.holeCharacter); } return suggester; } @@ -304,7 +304,7 @@ public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider @Override AnalyzingSuggestHolder getAnalyzingSuggestHolder(CompletionFieldMapper mapper) { - return lookupMap.get(mapper.names().indexName()); + return lookupMap.get(mapper.fieldType().names().indexName()); } @Override diff --git a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java index 453dbdc3eb5..91824b75af5 100644 --- a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java @@ -191,10 +191,10 @@ public final class PhraseSuggestParser implements SuggestContextParser { throw new IllegalArgumentException("No mapping found for field [" + suggestion.getField() + "]"); } else if (suggestion.getAnalyzer() == null) { // no analyzer name passed in, so try the field's analyzer, or the default analyzer - if (fieldMapper.searchAnalyzer() == null) { + if (fieldMapper.fieldType().searchAnalyzer() == null) { suggestion.setAnalyzer(mapperService.searchAnalyzer()); } else { - suggestion.setAnalyzer(fieldMapper.searchAnalyzer()); + suggestion.setAnalyzer(fieldMapper.fieldType().searchAnalyzer()); } } diff --git 
a/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 2592c3cba2e..cf9f09204d9 100644 --- a/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -163,8 +163,8 @@ public class PreBuiltAnalyzerTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test", indexSettings).mapperService().documentMapperParser().parse(mapping); FieldMapper fieldMapper = docMapper.mappers().getMapper("field"); - assertThat(fieldMapper.searchAnalyzer(), instanceOf(NamedAnalyzer.class)); - NamedAnalyzer fieldMapperNamedAnalyzer = (NamedAnalyzer) fieldMapper.searchAnalyzer(); + assertThat(fieldMapper.fieldType().searchAnalyzer(), instanceOf(NamedAnalyzer.class)); + NamedAnalyzer fieldMapperNamedAnalyzer = (NamedAnalyzer) fieldMapper.fieldType().searchAnalyzer(); assertThat(fieldMapperNamedAnalyzer.analyzer(), is(namedAnalyzer.analyzer())); } diff --git a/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java b/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java index 99bc38b5c84..ceeb448586b 100644 --- a/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java +++ b/src/test/java/org/elasticsearch/index/fielddata/NoOrdinalsStringFieldDataTests.java @@ -24,7 +24,7 @@ import org.apache.lucene.index.IndexReader; import org.elasticsearch.index.Index; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.search.MultiValueMode; import org.junit.Test; diff --git 
a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java index 75c8e18fc91..ab336cf7dab 100644 --- a/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/FieldMappersLookupTests.java @@ -176,17 +176,20 @@ public class FieldMappersLookupTests extends ElasticsearchTestCase { static class FakeFieldMapper extends AbstractFieldMapper { static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); public FakeFieldMapper(String fullName, String indexName) { - super(new Names(fullName, indexName, indexName, fullName), 1.0f, AbstractFieldMapper.Defaults.FIELD_TYPE, null, null, null, null, null, null, dummySettings, null, null); + super(makeFieldType(fullName, indexName), null, null, dummySettings, null, null); + } + static MappedFieldType makeFieldType(String fullName, String indexName) { + MappedFieldType fieldType = Defaults.FIELD_TYPE.clone(); + fieldType.setNames(new MappedFieldType.Names(fullName, indexName, indexName, fullName)); + return fieldType; } @Override - public FieldType defaultFieldType() { return null; } + public MappedFieldType defaultFieldType() { return null; } @Override public FieldDataType defaultFieldDataType() { return null; } @Override protected String contentType() { return null; } @Override protected void parseCreateField(ParseContext context, List list) throws IOException {} - @Override - public String value(Object value) { return null; } } } diff --git a/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 36c218f3d7c..b0ff89f414d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -306,9 +306,9 @@ 
public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { assertThat(field, nullValue()); } if (similarity == null || similarity.equals("TF/IDF")) { - assertThat(builtDocMapper.allFieldMapper().similarity(), nullValue()); + assertThat(builtDocMapper.allFieldMapper().fieldType().similarity(), nullValue()); } else { - assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().similarity().name())); + assertThat(similarity, equalTo(builtDocMapper.allFieldMapper().fieldType().similarity().name())); } assertThat(builtMapping.contains("fielddata"), is(fieldData)); if (allDefault) { diff --git a/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java index c09741ba17e..d28609e5bee 100644 --- a/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/core/Murmur3FieldMapperTests.java @@ -108,7 +108,7 @@ public class Murmur3FieldMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = parser.parse(mapping); Murmur3FieldMapper mapper = (Murmur3FieldMapper)docMapper.mappers().getMapper("field"); - assertFalse(mapper.hasDocValues()); + assertFalse(mapper.fieldType().hasDocValues()); } public void testIndexSettingBackcompat() throws Exception { diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index a8bf1476465..45c6322f359 100755 --- a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -23,7 +23,6 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.spatial4j.core.shape.Point; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -32,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MergeMappingException; @@ -61,15 +61,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField; * .shape GeoShape type */ public class ExternalMapper extends AbstractFieldMapper { - /** - * Returns the actual value of the field. - * - * @param value - */ - @Override - public Object value(Object value) { - return null; - } public static class Names { public static final String FIELD_BIN = "bin"; @@ -89,7 +80,7 @@ public class ExternalMapper extends AbstractFieldMapper { private String mapperName; public Builder(String name, String generatedValue, String mapperName) { - super(name, new FieldType(Defaults.FIELD_TYPE)); + super(name, Defaults.FIELD_TYPE); this.builder = this; this.stringBuilder = stringField(name).store(false); this.generatedValue = generatedValue; @@ -115,8 +106,9 @@ public class ExternalMapper extends AbstractFieldMapper { context.path().remove(); context.path().pathType(origPathType); + setupFieldType(context); - return new ExternalMapper(buildNames(context), generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper, + return new ExternalMapper(fieldType, generatedValue, mapperName, binMapper, boolMapper, pointMapper, shapeMapper, stringMapper, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } } @@ -159,11 +151,11 @@ public class ExternalMapper extends AbstractFieldMapper { private final 
GeoShapeFieldMapper shapeMapper; private final FieldMapper stringMapper; - public ExternalMapper(FieldMapper.Names names, + public ExternalMapper(MappedFieldType fieldType, String generatedValue, String mapperName, BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, GeoPointFieldMapper pointMapper, GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(names, 1.0f, Defaults.FIELD_TYPE, false, null, null, null, null, null, indexSettings, + super(fieldType, false, null, indexSettings, multiFields, copyTo); this.generatedValue = generatedValue; this.mapperName = mapperName; @@ -175,7 +167,7 @@ public class ExternalMapper extends AbstractFieldMapper { } @Override - public FieldType defaultFieldType() { + public MappedFieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @@ -236,7 +228,7 @@ public class ExternalMapper extends AbstractFieldMapper { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(names().shortName()); + builder.startObject(fieldType().names().shortName()); builder.field("type", mapperName); multiFields.toXContent(builder, params); builder.endObject(); diff --git a/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java index b959bb41ab6..d31e2a1b01b 100644 --- a/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapperTests.java @@ -55,12 +55,12 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = 
geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(GeoShapeFieldMapper.Defaults.DISTANCE_ERROR_PCT)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoShapeFieldMapper.Defaults.GEOHASH_LEVELS)); - assertThat(geoShapeFieldMapper.orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION)); } /** @@ -79,7 +79,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - ShapeBuilder.Orientation orientation = ((GeoShapeFieldMapper)fieldMapper).orientation(); + ShapeBuilder.Orientation orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation(); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); @@ -96,7 +96,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { fieldMapper = defaultMapper.mappers().getMapper("location"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - orientation = ((GeoShapeFieldMapper)fieldMapper).orientation(); + orientation = ((GeoShapeFieldMapper)fieldMapper).fieldType().orientation(); assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW)); @@ -118,7 +118,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = 
geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.1)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -141,7 +141,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -169,7 +169,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -193,7 +193,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); // distance_error_pct was not specified so we expect the mapper to take the highest precision between "precision" and // "tree_levels" setting distErrPct to 0 to guarantee desired precision @@ -219,7 +219,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, 
instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -243,7 +243,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -266,7 +266,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -292,7 +292,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); @@ -314,7 +314,7 @@ public class GeoShapeFieldMapperTests extends 
ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy.getDistErrPct(), equalTo(0.5)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); @@ -351,13 +351,13 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.defaultStrategy(); + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); assertThat(strategy.getDistErrPct(), equalTo(0.01)); assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); - assertThat(geoShapeFieldMapper.orientation(), equalTo(ShapeBuilder.Orientation.CCW)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW)); // correct mapping stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -373,12 +373,12 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - strategy = geoShapeFieldMapper.defaultStrategy(); + strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); assertThat(strategy.getDistErrPct(), equalTo(0.001)); assertThat(strategy.getGrid().getMaxLevels(), 
equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); - assertThat(geoShapeFieldMapper.orientation(), equalTo(ShapeBuilder.Orientation.CW)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW)); } } diff --git a/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java b/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java index 50cc9968466..12d5211f087 100644 --- a/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/geo/GeohashMappingGeoPointTests.java @@ -96,25 +96,25 @@ public class GeohashMappingGeoPointTests extends ElasticsearchSingleNodeTest { @Test public void testGeoHashPrecisionAsInteger() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash_precision", 10).endObject().endObject() + .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", 10).endObject().endObject() .endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); assertThat(mapper, instanceOf(GeoPointFieldMapper.class)); GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper; - assertThat(geoPointFieldMapper.geoHashPrecision(), is(10)); + assertThat(geoPointFieldMapper.fieldType().geohashPrecision(), is(10)); } @Test public void testGeoHashPrecisionAsLength() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash_precision", "5m").endObject().endObject() + 
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject() .endObject().endObject().string(); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); assertThat(mapper, instanceOf(GeoPointFieldMapper.class)); GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper; - assertThat(geoPointFieldMapper.geoHashPrecision(), is(10)); + assertThat(geoPointFieldMapper.fieldType().geohashPrecision(), is(10)); } @Test diff --git a/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java index 8da6e71c135..a8ce3c145a7 100644 --- a/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapperTests.java @@ -69,7 +69,7 @@ public class FieldNamesFieldMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); FieldNamesFieldMapper fieldNamesMapper = docMapper.rootMapper(FieldNamesFieldMapper.class); - assertFalse(fieldNamesMapper.hasDocValues()); + assertFalse(fieldNamesMapper.fieldType().hasDocValues()); assertEquals(IndexOptions.DOCS, fieldNamesMapper.fieldType().indexOptions()); assertFalse(fieldNamesMapper.fieldType().tokenized()); assertFalse(fieldNamesMapper.fieldType().stored()); diff --git a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 620847559ee..26ffbefdd1c 100644 --- a/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ 
b/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -130,11 +130,11 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper existing = parser.parse(mapping1); DocumentMapper changed = parser.parse(mapping2); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("whitespace")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); MergeResult mergeResult = existing.merge(changed.mapping(), false); assertThat(mergeResult.hasConflicts(), equalTo(false)); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("keyword")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword")); } @Test @@ -150,11 +150,11 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper existing = parser.parse(mapping1); DocumentMapper changed = parser.parse(mapping2); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("whitespace")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace")); MergeResult mergeResult = existing.merge(changed.mapping(), false); assertThat(mergeResult.hasConflicts(), equalTo(false)); - assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").searchAnalyzer()).name(), equalTo("standard")); + assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard")); assertThat(((StringFieldMapper) (existing.mappers().getMapper("field"))).getIgnoreAbove(), equalTo(14)); } diff --git a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java index 
e7df72c3dcd..4c845b4a91f 100644 --- a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldTests.java @@ -27,14 +27,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.core.*; +import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.index.mapper.core.DateFieldMapper; +import org.elasticsearch.index.mapper.core.LongFieldMapper; +import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.IndexService; import org.elasticsearch.test.ElasticsearchSingleNodeTest; import org.junit.Test; @@ -46,8 +50,12 @@ import java.util.TreeMap; import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.mapper.MapperBuilders.*; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.index.mapper.MapperBuilders.doc; +import static org.elasticsearch.index.mapper.MapperBuilders.rootObject; +import static org.elasticsearch.index.mapper.MapperBuilders.stringField; +import 
static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; /** * @@ -119,7 +127,7 @@ public class MultiFieldTests extends ElasticsearchSingleNodeTest { assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name.test1").fieldType().indexOptions()); assertThat(docMapper.mappers().getMapper("name.test1").fieldType().stored(), equalTo(true)); assertThat(docMapper.mappers().getMapper("name.test1").fieldType().tokenized(), equalTo(true)); - assertThat(docMapper.mappers().getMapper("name.test1").fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER)); + assertThat(docMapper.mappers().getMapper("name.test1").fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); assertThat(docMapper.mappers().getMapper("name.test2"), notNullValue()); assertThat(docMapper.mappers().getMapper("name.test2"), instanceOf(TokenCountFieldMapper.class)); diff --git a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java index 3e5c29daade..60609d82b1a 100644 --- a/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/multifield/MultiFieldsIntegrationTests.java @@ -113,6 +113,7 @@ public class MultiFieldsIntegrationTests extends ElasticsearchIntegrationTest { assertThat(mappingMetaData, not(nullValue())); Map mappingSource = mappingMetaData.sourceAsMap(); Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource)); + logger.info("Keys: " + aField.keySet()); assertThat(aField.size(), equalTo(2)); assertThat(aField.get("type").toString(), equalTo("geo_point")); assertThat(aField.get("fields"), notNullValue()); diff --git a/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java 
b/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java index 5ab240857b3..c429a012f86 100644 --- a/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/simple/SimpleMapperTests.java @@ -52,7 +52,7 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); doc = docMapper.parse("person", "1", json).rootDoc(); @@ -71,8 +71,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper builtDocMapper = parser.parse(builtMapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = builtDocMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); } @@ -86,8 +86,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { BytesReference json = new 
BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json")); Document doc = docMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); } @@ -98,8 +98,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest { DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-notype-noid.json")); Document doc = docMapper.parse("person", "1", json).rootDoc(); - assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1"))); - assertThat(doc.get(docMapper.mappers().getMapper("name.first").names().indexName()), equalTo("shay")); + assertThat(doc.get(docMapper.uidMapper().fieldType().names().indexName()), equalTo(Uid.createUid("person", "1"))); + assertThat(doc.get(docMapper.mappers().getMapper("name.first").fieldType().names().indexName()), equalTo("shay")); // System.out.println("Document: " + doc); // System.out.println("Json: " + docMapper.sourceMapper().value(doc)); } diff --git a/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index 5b429d093c2..0583e289994 100644 --- a/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ 
b/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -54,6 +54,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Map; +import static org.elasticsearch.index.mapper.core.StringFieldMapper.Builder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -371,9 +372,9 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest { DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); final BuilderContext ctx = new BuilderContext(indexService.settingsService().getSettings(), new ContentPath(1)); - assertFalse(new StringFieldMapper.Builder("anything").index(false).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(false).fieldDataSettings(DOC_VALUES_SETTINGS).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(false).docValues(true).build(ctx).hasDocValues()); + assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(false).fieldDataSettings(DOC_VALUES_SETTINGS).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") @@ -414,11 +415,11 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest { fail(); } catch (Exception e) { /* OK */ } - assertFalse(new StringFieldMapper.Builder("anything").index(false).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(true).tokenized(false).build(ctx).hasDocValues()); - assertFalse(new StringFieldMapper.Builder("anything").index(true).tokenized(true).build(ctx).hasDocValues()); - assertFalse(new 
StringFieldMapper.Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).hasDocValues()); - assertTrue(new StringFieldMapper.Builder("anything").index(false).docValues(true).build(ctx).hasDocValues()); + assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues()); + assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues()); + assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); + assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") diff --git a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index e415ea76c07..3e40a84c931 100644 --- a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -38,7 +38,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; @@ -113,8 +113,8 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0) ? 
true : false)); assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); assertThat(docMapper.timestampFieldMapper().path(), equalTo(TimestampFieldMapper.Defaults.PATH)); - assertThat(docMapper.timestampFieldMapper().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); - assertThat(docMapper.timestampFieldMapper().hasDocValues(), equalTo(false)); + assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(false)); assertAcked(client().admin().indices().prepareDelete("test").execute().get()); } } @@ -135,8 +135,8 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(false)); assertEquals(IndexOptions.NONE, docMapper.timestampFieldMapper().fieldType().indexOptions()); assertThat(docMapper.timestampFieldMapper().path(), equalTo("timestamp")); - assertThat(docMapper.timestampFieldMapper().dateTimeFormatter().format(), equalTo("year")); - assertThat(docMapper.timestampFieldMapper().hasDocValues(), equalTo(true)); + assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo("year")); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(true)); } @Test @@ -507,16 +507,16 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); DocumentMapper docMapper = parser.parse(mapping); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY)); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); + 
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject() .endObject().endObject().string(); MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false); assertThat(mergeResult.buildConflicts().length, equalTo(0)); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER)); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("array")); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array")); } @Test @@ -574,7 +574,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { DocumentMapperParser parser = createIndex("test", indexSettings).mapperService().documentMapperParser(); DocumentMapper docMapper = parser.parse(mapping); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_timestamp").field("enabled", false) .startObject("fielddata").field("format", "array").endObject() @@ -597,9 +597,9 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { assertTrue("found unexpected conflict [" + conflict + "]", 
expectedConflicts.remove(conflict)); } assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty()); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.LAZY)); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.LAZY)); assertTrue(docMapper.timestampFieldMapper().enabled()); - assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); + assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("doc_values")); } @Test @@ -739,9 +739,9 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { void assertDocValuesSerialization(String mapping) throws Exception { DocumentMapperParser parser = createIndex("test_doc_values").mapperService().documentMapperParser(); DocumentMapper docMapper = parser.parse(mapping); - boolean docValues= docMapper.timestampFieldMapper().hasDocValues(); + boolean docValues = docMapper.timestampFieldMapper().fieldType().hasDocValues(); docMapper = parser.parse(docMapper.mappingSource().string()); - assertThat(docMapper.timestampFieldMapper().hasDocValues(), equalTo(docValues)); + assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(docValues)); assertAcked(client().admin().indices().prepareDelete("test_doc_values")); } diff --git a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java index 1090f6df85a..588891d1cf4 100644 --- a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingTests.java @@ -149,10 +149,10 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest { DocumentMapper documentMapper = 
indexService.mapperService().parse("type", new CompressedString(indexMapping.string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); - assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); + assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); documentMapper = indexService.mapperService().parse("type", new CompressedString(documentMapper.mappingSource().string()), true); assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); - assertTrue(documentMapper.timestampFieldMapper().hasDocValues()); + assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); } diff --git a/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java b/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java index c2c98553bbf..3f4119b4739 100644 --- a/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java +++ b/src/test/java/org/elasticsearch/index/similarity/SimilarityTests.java @@ -56,9 +56,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(DefaultSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DefaultSimilarityProvider.class)); - DefaultSimilarity similarity = (DefaultSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + DefaultSimilarity similarity = (DefaultSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); 
assertThat(similarity.getDiscountOverlaps(), equalTo(false)); } @@ -78,9 +78,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(BM25SimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(BM25SimilarityProvider.class)); - BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").similarity().get(); + BM25Similarity similarity = (BM25Similarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getK1(), equalTo(2.0f)); assertThat(similarity.getB(), equalTo(1.5f)); assertThat(similarity.getDiscountOverlaps(), equalTo(false)); @@ -103,9 +103,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(DFRSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(DFRSimilarityProvider.class)); - DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + DFRSimilarity similarity = (DFRSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getBasicModel(), instanceOf(BasicModelG.class)); assertThat(similarity.getAfterEffect(), instanceOf(AfterEffectL.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -129,9 +129,9 @@ 
public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(IBSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(IBSimilarityProvider.class)); - IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + IBSimilarity similarity = (IBSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getDistribution(), instanceOf(DistributionSPL.class)); assertThat(similarity.getLambda(), instanceOf(LambdaTTF.class)); assertThat(similarity.getNormalization(), instanceOf(NormalizationH2.class)); @@ -152,9 +152,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(LMDirichletSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMDirichletSimilarityProvider.class)); - LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + LMDirichletSimilarity similarity = (LMDirichletSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getMu(), equalTo(3000f)); } @@ -172,9 +172,9 @@ public class SimilarityTests extends ElasticsearchSingleNodeTest { .build(); SimilarityService similarityService = createIndex("foo", 
indexSettings).similarityService(); DocumentMapper documentMapper = similarityService.mapperService().documentMapperParser().parse(mapping); - assertThat(documentMapper.mappers().getMapper("field1").similarity(), instanceOf(LMJelinekMercerSimilarityProvider.class)); + assertThat(documentMapper.mappers().getMapper("field1").fieldType().similarity(), instanceOf(LMJelinekMercerSimilarityProvider.class)); - LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").similarity().get(); + LMJelinekMercerSimilarity similarity = (LMJelinekMercerSimilarity) documentMapper.mappers().getMapper("field1").fieldType().similarity().get(); assertThat(similarity.getLambda(), equalTo(0.7f)); } } diff --git a/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java b/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java index 637c877caf3..adf1fbf2a04 100644 --- a/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java +++ b/src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.cache.query.IndicesQueryCache; import org.elasticsearch.search.SearchService; diff --git a/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java b/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java index 733f0a26cd2..b61ad6e61df 100644 --- a/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java +++ b/src/test/java/org/elasticsearch/search/child/ParentFieldLoadingTest.java @@ -26,11 +26,11 @@ 
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.merge.policy.MergePolicyModule; -import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ElasticsearchIntegrationTest; @@ -62,7 +62,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertAcked(prepareCreate("test") .setSettings(indexSettings) .addMapping("parent") - .addMapping("child", childMapping(FieldMapper.Loading.LAZY))); + .addMapping("child", childMapping(MappedFieldType.Loading.LAZY))); ensureGreen(); client().prepareIndex("test", "parent", "1").setSource("{}").get(); @@ -93,7 +93,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertAcked(prepareCreate("test") .setSettings(indexSettings) .addMapping("parent") - .addMapping("child", childMapping(FieldMapper.Loading.EAGER))); + .addMapping("child", childMapping(MappedFieldType.Loading.EAGER))); ensureGreen(); client().prepareIndex("test", "parent", "1").setSource("{}").get(); @@ -108,7 +108,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertAcked(prepareCreate("test") .setSettings(indexSettings) .addMapping("parent") - .addMapping("child", childMapping(FieldMapper.Loading.EAGER_GLOBAL_ORDINALS))); + .addMapping("child", childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))); ensureGreen(); // Need to do 2 separate refreshes, otherwise we have 1 segment and then we can't measure if 
global ordinals @@ -139,7 +139,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertThat(fielddataSizeDefault, greaterThan(0l)); PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child") - .setSource(childMapping(FieldMapper.Loading.EAGER_GLOBAL_ORDINALS)) + .setSource(childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS)) .get(); assertAcked(putMappingResponse); assertBusy(new Runnable() { @@ -156,7 +156,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { MapperService mapperService = indexService.mapperService(); DocumentMapper documentMapper = mapperService.documentMapper("child"); if (documentMapper != null) { - verified = documentMapper.parentFieldMapper().fieldDataType().getLoading() == FieldMapper.Loading.EAGER_GLOBAL_ORDINALS; + verified = documentMapper.parentFieldMapper().fieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS; } } assertTrue(verified); @@ -171,10 +171,10 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest { assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(fielddataSizeDefault)); } - private XContentBuilder childMapping(FieldMapper.Loading loading) throws IOException { + private XContentBuilder childMapping(MappedFieldType.Loading loading) throws IOException { return jsonBuilder().startObject().startObject("child").startObject("_parent") .field("type", "parent") - .startObject("fielddata").field(FieldMapper.Loading.KEY, loading).endObject() + .startObject("fielddata").field(MappedFieldType.Loading.KEY, loading).endObject() .endObject().endObject().endObject(); } diff --git a/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java index 0d4329d5bb7..83bcdb44397 100644 --- 
a/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java +++ b/src/test/java/org/elasticsearch/search/child/SimpleChildQuerySearchTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.index.cache.filter.FilterCacheModule; import org.elasticsearch.index.cache.filter.FilterCacheModule.FilterCacheSettings; import org.elasticsearch.index.cache.filter.index.IndexFilterCache; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; diff --git a/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java b/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java index 13e4d0d6554..eb6a6629aac 100644 --- a/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java +++ b/src/test/java/org/elasticsearch/search/geo/GeoShapeIntegrationTests.java @@ -461,7 +461,7 @@ public class GeoShapeIntegrationTests extends ElasticsearchIntegrationTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)fieldMapper; - ShapeBuilder.Orientation orientation = gsfm.orientation(); + ShapeBuilder.Orientation orientation = gsfm.fieldType().orientation(); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); @@ -473,7 +473,7 @@ public class GeoShapeIntegrationTests extends ElasticsearchIntegrationTest { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); gsfm = (GeoShapeFieldMapper)fieldMapper; - orientation = gsfm.orientation(); + orientation = gsfm.fieldType().orientation(); assertThat(orientation, 
equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT)); assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW)); diff --git a/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java b/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java index d8cd3dd3fc7..a2867abcd81 100644 --- a/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java +++ b/src/test/java/org/elasticsearch/search/suggest/completion/AnalyzingCompletionLookupProviderV1.java @@ -59,6 +59,8 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; +import static org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester.HOLE_CHARACTER; + /** * This is an older implementation of the AnalyzingCompletionLookupProvider class * We use this to test for backwards compatibility in our tests, namely @@ -232,7 +234,7 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide return new LookupFactory() { @Override public Lookup getLookup(CompletionFieldMapper mapper, CompletionSuggestionContext suggestionContext) { - AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.names().indexName()); + AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(mapper.fieldType().names().indexName()); if (analyzingSuggestHolder == null) { return null; } @@ -242,18 +244,18 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide XAnalyzingSuggester suggester; if (suggestionContext.isFuzzy()) { - suggester = new XFuzzySuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), - suggestionContext.getFuzzyPrefixLength(), 
suggestionContext.getFuzzyMinLength(), false, - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, XAnalyzingSuggester.HOLE_CHARACTER); + suggester = new XFuzzySuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + suggestionContext.getFuzzyEditDistance(), suggestionContext.isFuzzyTranspositions(), + suggestionContext.getFuzzyPrefixLength(), suggestionContext.getFuzzyMinLength(), false, + analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER); } else { - suggester = new XAnalyzingSuggester(mapper.indexAnalyzer(), queryPrefix, mapper.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - analyzingSuggestHolder.preservePositionIncrements, - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, XAnalyzingSuggester.HOLE_CHARACTER); + suggester = new XAnalyzingSuggester(mapper.fieldType().indexAnalyzer(), queryPrefix, mapper.fieldType().searchAnalyzer(), flags, + analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, + analyzingSuggestHolder.preservePositionIncrements, + analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, + analyzingSuggestHolder.maxAnalyzedPathsForOneInput, SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER); } return suggester; } @@ -285,7 +287,7 @@ public class AnalyzingCompletionLookupProviderV1 extends CompletionLookupProvide @Override AnalyzingSuggestHolder getAnalyzingSuggestHolder(CompletionFieldMapper mapper) { - return lookupMap.get(mapper.names().indexName()); + 
return lookupMap.get(mapper.fieldType().names().indexName()); } @Override diff --git a/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java b/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java index 757723ef4d0..d77d6308b62 100644 --- a/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java +++ b/src/test/java/org/elasticsearch/search/suggest/completion/CompletionPostingsFormatTest.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.suggest.completion; import com.google.common.collect.Lists; - import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldsConsumer; @@ -55,7 +54,8 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.codec.postingsformat.Elasticsearch090PostingsFormat; -import org.elasticsearch.index.mapper.FieldMapper.Names; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Names; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; import org.elasticsearch.search.suggest.SuggestUtils; @@ -76,8 +76,16 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; public class CompletionPostingsFormatTest extends ElasticsearchTestCase { - + Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build(); + static final MappedFieldType FIELD_TYPE = CompletionFieldMapper.Defaults.FIELD_TYPE.clone(); + static final NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); + static { + FIELD_TYPE.setNames(new Names("foo")); + FIELD_TYPE.setIndexAnalyzer(analyzer); + 
FIELD_TYPE.setSearchAnalyzer(analyzer); + FIELD_TYPE.freeze(); + } @Test public void testCompletionPostingsFormat() throws IOException { @@ -92,8 +100,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = currentProvider.load(input); PostingsFormat format = PostingsFormat.forName(Lucene.LATEST_POSTINGS_FORMAT); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - Lookup lookup = load.getLookup(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null)); + Lookup lookup = load.getLookup(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null)); List result = lookup.lookup("ge", false, 10); assertThat(result.get(0).key.toString(), equalTo("Generator - Foo Fighters")); assertThat(result.get(0).payload.utf8ToString(), equalTo("id:10")); @@ -111,8 +118,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = currentProvider.load(input); PostingsFormat format = new Elasticsearch090PostingsFormat(); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = load.getAnalyzingSuggestHolder(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); + AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = 
load.getAnalyzingSuggestHolder(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); assertThat(analyzingSuggestHolder.sepLabel, is(AnalyzingCompletionLookupProviderV1.SEP_LABEL)); assertThat(analyzingSuggestHolder.payloadSep, is(AnalyzingCompletionLookupProviderV1.PAYLOAD_SEP)); assertThat(analyzingSuggestHolder.endByte, is(AnalyzingCompletionLookupProviderV1.END_BYTE)); @@ -129,8 +135,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = currentProvider.load(input); PostingsFormat format = new Elasticsearch090PostingsFormat(); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = load.getAnalyzingSuggestHolder(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); + AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder analyzingSuggestHolder = load.getAnalyzingSuggestHolder(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING)); assertThat(analyzingSuggestHolder.sepLabel, is(XAnalyzingSuggester.SEP_LABEL)); assertThat(analyzingSuggestHolder.payloadSep, is(XAnalyzingSuggester.PAYLOAD_SEP)); assertThat(analyzingSuggestHolder.endByte, is(XAnalyzingSuggester.END_BYTE)); @@ -237,8 +242,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { reference.build(iter); PostingsFormat provider = PostingsFormat.forName(Lucene.LATEST_POSTINGS_FORMAT); - NamedAnalyzer namedAnalzyer = new NamedAnalyzer("foo", new StandardAnalyzer()); - final 
CompletionFieldMapper mapper = new CompletionFieldMapper(new Names("foo"), namedAnalzyer, namedAnalzyer, provider, null, usePayloads, + final CompletionFieldMapper mapper = new CompletionFieldMapper(FIELD_TYPE, provider, usePayloads, preserveSeparators, preservePositionIncrements, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING); Lookup buildAnalyzingLookup = buildAnalyzingLookup(mapper, titles, titles, weights); Field field = buildAnalyzingLookup.getClass().getDeclaredField("maxAnalyzedPathsForOneInput"); @@ -250,7 +254,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { for (int i = 0; i < titles.length; i++) { int res = between(1, 10); final StringBuilder builder = new StringBuilder(); - SuggestUtils.analyze(namedAnalzyer.tokenStream("foo", titles[i]), new SuggestUtils.TokenConsumer() { + SuggestUtils.analyze(analyzer.tokenStream("foo", titles[i]), new SuggestUtils.TokenConsumer() { @Override public void nextToken() throws IOException { if (builder.length() == 0) { @@ -285,7 +289,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { return mapper.postingsFormat(in); } }; - IndexWriterConfig indexWriterConfig = new IndexWriterConfig(mapper.indexAnalyzer()); + IndexWriterConfig indexWriterConfig = new IndexWriterConfig(mapper.fieldType().indexAnalyzer()); indexWriterConfig.setCodec(filterCodec); IndexWriter writer = new IndexWriter(dir, indexWriterConfig); @@ -305,7 +309,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { assertThat(reader.leaves().size(), equalTo(1)); assertThat(reader.leaves().get(0).reader().numDocs(), equalTo(weights.length)); LeafReaderContext atomicReaderContext = reader.leaves().get(0); - Terms luceneTerms = atomicReaderContext.reader().terms(mapper.names().fullName()); + Terms luceneTerms = atomicReaderContext.reader().terms(mapper.fieldType().names().fullName()); Lookup lookup = 
((Completion090PostingsFormat.CompletionTerms) luceneTerms).getLookup(mapper, new CompletionSuggestionContext(null)); reader.close(); writer.close(); @@ -340,8 +344,7 @@ public class CompletionPostingsFormatTest extends ElasticsearchTestCase { IndexInput input = dir.openInput("foo.txt", IOContext.DEFAULT); LookupFactory load = provider.load(input); PostingsFormat format = new Elasticsearch090PostingsFormat(); - NamedAnalyzer analyzer = new NamedAnalyzer("foo", new StandardAnalyzer()); - assertNull(load.getLookup(new CompletionFieldMapper(new Names("foo"), analyzer, analyzer, format, null, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null))); + assertNull(load.getLookup(new CompletionFieldMapper(FIELD_TYPE, format, true, true, true, Integer.MAX_VALUE, indexSettings, AbstractFieldMapper.MultiFields.empty(), null, ContextMapping.EMPTY_MAPPING), new CompletionSuggestionContext(null))); dir.close(); } diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index 81f0e9f39bf..621d4fc4d01 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -98,7 +98,8 @@ import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.FieldMapper.Loading; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MappedFieldType.Loading; import org.elasticsearch.index.mapper.internal.SizeFieldMapper; import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.merge.policy.*; @@ -558,7 +559,7 @@ public 
abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase private static Settings.Builder setRandomNormsLoading(Random random, Settings.Builder builder) { if (random.nextBoolean()) { - builder.put(SearchService.NORMS_LOADING_KEY, RandomPicks.randomFrom(random, Arrays.asList(FieldMapper.Loading.EAGER, FieldMapper.Loading.LAZY))); + builder.put(SearchService.NORMS_LOADING_KEY, RandomPicks.randomFrom(random, Arrays.asList(MappedFieldType.Loading.EAGER, MappedFieldType.Loading.LAZY))); } return builder; } From 6598d82ae8f1ce36991ddc7af2a7e53a3f1a8541 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 3 Jun 2015 11:56:25 +0200 Subject: [PATCH 02/16] fix bad merge for parent field mapper, and change most field types to be pkg-private and final --- .../elasticsearch/index/mapper/core/BinaryFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/BooleanFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/ByteFieldMapper.java | 2 +- .../index/mapper/core/CompletionFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/DoubleFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/FloatFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/IntegerFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/ShortFieldMapper.java | 2 +- .../elasticsearch/index/mapper/core/StringFieldMapper.java | 2 +- .../elasticsearch/index/mapper/geo/GeoPointFieldMapper.java | 2 +- .../elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/AllFieldMapper.java | 2 +- .../index/mapper/internal/FieldNamesFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/IdFieldMapper.java | 2 +- .../index/mapper/internal/IndexFieldMapper.java | 2 +- .../index/mapper/internal/ParentFieldMapper.java | 6 +++--- .../index/mapper/internal/RoutingFieldMapper.java | 2 +- .../index/mapper/internal/SourceFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/TTLFieldMapper.java | 2 +- 
.../index/mapper/internal/TimestampFieldMapper.java | 2 +- .../index/mapper/internal/TypeFieldMapper.java | 2 +- .../elasticsearch/index/mapper/internal/UidFieldMapper.java | 2 +- .../index/mapper/internal/VersionFieldMapper.java | 2 +- .../org/elasticsearch/index/mapper/ip/IpFieldMapper.java | 3 +-- 24 files changed, 26 insertions(+), 27 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java index cc638623e99..20bcb15ad53 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BinaryFieldMapper.java @@ -105,7 +105,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper { } } - public static class BinaryFieldType extends MappedFieldType { + static final class BinaryFieldType extends MappedFieldType { protected boolean tryUncompressing = false; public BinaryFieldType() { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java index cfbcbc45ef1..f33a24d15e1 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/BooleanFieldMapper.java @@ -128,7 +128,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper { } } - public static class BooleanFieldType extends MappedFieldType { + static final class BooleanFieldType extends MappedFieldType { public BooleanFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java index 1a65d58025e..f9800ce9ffa 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/ByteFieldMapper.java @@ 
-130,7 +130,7 @@ public class ByteFieldMapper extends NumberFieldMapper { } } - public static class ByteFieldType extends NumberFieldType { + static final class ByteFieldType extends NumberFieldType { public ByteFieldType() {} protected ByteFieldType(ByteFieldType ref) { diff --git a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index eeb28e24121..d229d6a9b6f 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -220,7 +220,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { } } - public static class CompletionFieldType extends MappedFieldType { + static final class CompletionFieldType extends MappedFieldType { public CompletionFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java index a8f4a44e6ab..63f0c87939b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/DoubleFieldMapper.java @@ -135,7 +135,7 @@ public class DoubleFieldMapper extends NumberFieldMapper { } } - public static class DoubleFieldType extends NumberFieldType { + static final class DoubleFieldType extends NumberFieldType { public DoubleFieldType() {} diff --git a/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java index 50c5f578a48..1a7996befe7 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/FloatFieldMapper.java @@ -136,7 +136,7 @@ public class FloatFieldMapper extends NumberFieldMapper { } } - public static class 
FloatFieldType extends NumberFieldType { + static final class FloatFieldType extends NumberFieldType { public FloatFieldType() {} diff --git a/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java index 893722d5a0a..0f9eb053fbf 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/IntegerFieldMapper.java @@ -132,7 +132,7 @@ public class IntegerFieldMapper extends NumberFieldMapper { } } - public static class IntegerFieldType extends NumberFieldType { + static final class IntegerFieldType extends NumberFieldType { public IntegerFieldType() {} diff --git a/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java index 382f30eab40..3eefdaf1b6d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/ShortFieldMapper.java @@ -134,7 +134,7 @@ public class ShortFieldMapper extends NumberFieldMapper { } } - public static class ShortFieldType extends NumberFieldType { + static final class ShortFieldType extends NumberFieldType { public ShortFieldType() {} diff --git a/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java index 0fd77fe5875..52458fffb00 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/StringFieldMapper.java @@ -189,7 +189,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa } } - public static class StringFieldType extends MappedFieldType { + static final class StringFieldType extends MappedFieldType { public StringFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git 
a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 4485aed953b..bb3ada8278d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -277,7 +277,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal } } - public static class GeoPointFieldType extends MappedFieldType { + public static final class GeoPointFieldType extends MappedFieldType { private MappedFieldType geohashFieldType; private int geohashPrecision; diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index 9468088c982..817b09bd071 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -232,7 +232,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } } - public static class GeoShapeFieldType extends MappedFieldType { + public static final class GeoShapeFieldType extends MappedFieldType { private PrefixTreeStrategy defaultStrategy; private RecursivePrefixTreeStrategy recursiveStrategy; diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 364dee5852b..2af2f2f6d3f 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -155,7 +155,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public static class AllFieldType extends MappedFieldType { + static final class AllFieldType extends MappedFieldType { public AllFieldType() { 
super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index 631fdb87771..39bf0344ed4 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -133,7 +133,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa } } - public static class FieldNamesFieldType extends MappedFieldType { + static final class FieldNamesFieldType extends MappedFieldType { public FieldNamesFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index 87cd4f7cb31..6a22f038d2c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -136,7 +136,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public static class IdFieldType extends MappedFieldType { + static final class IdFieldType extends MappedFieldType { public IdFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 00fe4013ed8..f558b501840 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -119,7 +119,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper } } - public static class IndexFieldType extends MappedFieldType { + static final class IndexFieldType extends 
MappedFieldType { public IndexFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java index e0e03ec9b1d..a0cb94bf939 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/ParentFieldMapper.java @@ -114,7 +114,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper if (type == null) { throw new MapperParsingException("[_parent] field mapping must contain the [type] option"); } - fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name)); + setupFieldType(context); return new ParentFieldMapper(fieldType, type, fieldDataSettings, context.indexSettings()); } } @@ -147,7 +147,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper } } - public static class ParentFieldType extends MappedFieldType { + static final class ParentFieldType extends MappedFieldType { public ParentFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); @@ -232,7 +232,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper private final BytesRef typeAsBytes; protected ParentFieldMapper(MappedFieldType fieldType, String type, @Nullable Settings fieldDataSettings, Settings indexSettings) { - super(fieldType, false, fieldDataSettings, indexSettings); + super(fieldType, Version.indexCreated(indexSettings).onOrAfter(Version.V_2_0_0), fieldDataSettings, indexSettings); this.type = type; this.typeAsBytes = type == null ? 
null : new BytesRef(type); } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index d03238caf6e..ccf6d84293d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -124,7 +124,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe } } - public static class RoutingFieldType extends MappedFieldType { + static final class RoutingFieldType extends MappedFieldType { public RoutingFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 7fe65d13296..8dcf7f7e746 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -200,7 +200,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper } } - public static class SourceFieldType extends MappedFieldType { + static final class SourceFieldType extends MappedFieldType { public SourceFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 0040b4a5140..cec4941afec 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -136,7 +136,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper { } } - public static class TTLFieldType extends LongFieldType { + static final class TTLFieldType extends LongFieldType { public TTLFieldType() { } 
diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 5f068395a03..e517a1f801c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -210,7 +210,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper } } - public static class TimestampFieldType extends DateFieldType { + static final class TimestampFieldType extends DateFieldType { public TimestampFieldType() {} diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index 9128534c468..7fea8b5f391 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -105,7 +105,7 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public static class TypeFieldType extends MappedFieldType { + static final class TypeFieldType extends MappedFieldType { public TypeFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index 3d49df6089c..594ec2f7ccb 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -106,7 +106,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper { } } - public static class UidFieldType extends MappedFieldType { + static final class UidFieldType extends MappedFieldType { public UidFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git 
a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index f4a33b80b5c..5e33faf0ebb 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -91,7 +91,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe } } - public static class VersionFieldType extends MappedFieldType { + static final class VersionFieldType extends MappedFieldType { public VersionFieldType() { super(AbstractFieldMapper.Defaults.FIELD_TYPE); diff --git a/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java index 8cf6b93f5ca..8bc1f3eeecf 100644 --- a/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/ip/IpFieldMapper.java @@ -162,7 +162,7 @@ public class IpFieldMapper extends NumberFieldMapper { } } - public static class IpFieldType extends NumberFieldType { + static final class IpFieldType extends NumberFieldType { public IpFieldType() {} @@ -175,7 +175,6 @@ public class IpFieldMapper extends NumberFieldMapper { return new IpFieldType(this); } - @Override public Long value(Object value) { if (value == null) { From a805cef045eca2ad0bdcf28b50105e01697bdf48 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 2 Jun 2015 18:14:14 +0200 Subject: [PATCH 03/16] Bulk: allow null values in action/metadata line parameters Closes #11458 --- .../elasticsearch/action/bulk/BulkRequest.java | 2 +- .../action/bulk/BulkRequestTests.java | 8 ++++++++ .../elasticsearch/action/bulk/simple-bulk10.json | 15 +++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 src/test/java/org/elasticsearch/action/bulk/simple-bulk10.json diff --git 
a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index a562dc046b2..d699b8c038f 100644 --- a/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -332,7 +332,7 @@ public class BulkRequest extends ActionRequest implements Composite } else { throw new IllegalArgumentException("Action/metadata line [" + line + "] contains an unknown parameter [" + currentFieldName + "]"); } - } else { + } else if (token != XContentParser.Token.VALUE_NULL) { throw new IllegalArgumentException("Malformed action/metadata line [" + line + "], expected a simple value for field [" + currentFieldName + "] but found [" + token + "]"); } } diff --git a/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 8600092f1d7..6220958a0c6 100644 --- a/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -177,4 +177,12 @@ public class BulkRequestTests extends ElasticsearchTestCase { e.getMessage().contains("Malformed action/metadata line [3], expected START_OBJECT or END_OBJECT but found [START_ARRAY]"), equalTo(true)); } } + + @Test + public void testSimpleBulk10() throws Exception { + String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk10.json"); + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.add(bulkAction.getBytes(Charsets.UTF_8), 0, bulkAction.length(), null, null); + assertThat(bulkRequest.numberOfActions(), equalTo(9)); + } } diff --git a/src/test/java/org/elasticsearch/action/bulk/simple-bulk10.json b/src/test/java/org/elasticsearch/action/bulk/simple-bulk10.json new file mode 100644 index 00000000000..3556dc261b0 --- /dev/null +++ b/src/test/java/org/elasticsearch/action/bulk/simple-bulk10.json @@ -0,0 +1,15 @@ +{ "index" 
: {"_index":null, "_type":"type1", "_id":"0"} } +{ "field1" : "value1" } +{ "index" : {"_index":"test", "_type":null, "_id":"0"} } +{ "field1" : "value1" } +{ "index" : {"_index":"test", "_type":"type1", "_id":null} } +{ "field1" : "value1" } +{ "delete" : {"_index":null, "_type":"type1", "_id":"0"} } +{ "delete" : {"_index":"test", "_type":null, "_id":"0"} } +{ "delete" : {"_index":"test", "_type":"type1", "_id":null} } +{ "create" : {"_index":null, "_type":"type1", "_id":"0"} } +{ "field1" : "value1" } +{ "create" : {"_index":"test", "_type":null, "_id":"0"} } +{ "field1" : "value1" } +{ "create" : {"_index":"test", "_type":"type1", "_id":null} } +{ "field1" : "value1" } From 5fd96d9371f68645ac6ec2ec17c0faeaa21610a8 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 7 May 2015 12:43:00 -0600 Subject: [PATCH 04/16] [DOCS] Document the `index.shared_filesystem.recover_on_any_node` setting Relates to #10960 Closes #11047 --- .../indices/shadow-replicas.asciidoc | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/docs/reference/indices/shadow-replicas.asciidoc b/docs/reference/indices/shadow-replicas.asciidoc index 1f4db1c90cf..89dccd9f62e 100644 --- a/docs/reference/indices/shadow-replicas.asciidoc +++ b/docs/reference/indices/shadow-replicas.asciidoc @@ -16,6 +16,25 @@ settings, you need to enable using it in elasticsearch.yml: node.enable_custom_paths: true -------------------------------------------------- +You will also need to disable the default security manager that Elasticsearch +runs with. 
You can do this by either passing +`-Des.security.manager.enabled=false` with the parameters while starting +Elasticsearch, or you can disable it in elasticsearch.yml: + +[source,yaml] +-------------------------------------------------- +security.manager.enabled: false +-------------------------------------------------- + +[WARNING] +======================== +Disabling the security manager means that the Elasticsearch process is not +limited to the directories and files that it can read and write. However, +because the `index.data_path` setting is set when creating the index, the +security manager would prevent writing or reading from the index's location, so +it must be disabled. +======================== + You can then create an index with a custom data path, where each node will use this path for the data: @@ -88,6 +107,12 @@ settings API: Boolean value indicating this index uses a shared filesystem. Defaults to the `true` if `index.shadow_replicas` is set to true, `false` otherwise. +`index.shared_filesystem.recover_on_any_node`:: + Boolean value indicating whether the primary shards for the index should be + allowed to recover on any node in the cluster, regardless of the number of + replicas or whether the node has previously had the shard allocated to it + before. Defaults to `false`. 
+ === Node level settings related to shadow replicas These are non-dynamic settings that need to be configured in `elasticsearch.yml` From 0f207bf4fb85725e0c3893b1232675d79ada83ed Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 3 Jun 2015 12:56:25 +0200 Subject: [PATCH 05/16] Cleanup DeleteByQuery code from IndexShard --- .../elasticsearch/index/shard/IndexShard.java | 20 ---------------- .../shard/TranslogRecoveryPerformer.java | 24 ++++++++++++++++++- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 06400a3bf82..dbf1af95ade 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -550,26 +550,6 @@ public class IndexShard extends AbstractIndexShardComponent { indexingService.postDelete(delete); } - public Engine.DeleteByQuery prepareDeleteByQuery(BytesReference source, @Nullable String[] filteringAliases, Engine.Operation.Origin origin, String... types) { - return prepareDeleteByQuery(queryParserService, mapperService, indexAliasesService, indexCache, source, filteringAliases, origin, types); - } - - static Engine.DeleteByQuery prepareDeleteByQuery(IndexQueryParserService queryParserService, MapperService mapperService, IndexAliasesService indexAliasesService, IndexCache indexCache, BytesReference source, @Nullable String[] filteringAliases, Engine.Operation.Origin origin, String... types) { - long startTime = System.nanoTime(); - if (types == null) { - types = Strings.EMPTY_ARRAY; - } - Query query = queryParserService.parseQuery(source).query(); - Query searchFilter = mapperService.searchFilter(types); - if (searchFilter != null) { - query = Queries.filtered(query, searchFilter); - } - - Query aliasFilter = indexAliasesService.aliasFilter(filteringAliases); - BitDocIdSetFilter parentFilter = mapperService.hasNested() ? 
indexCache.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()) : null; - return new Engine.DeleteByQuery(query, source, filteringAliases, aliasFilter, parentFilter, origin, startTime, types); - } - public Engine.GetResult get(Engine.Get get) { readAllowed(); return engine().get(get); diff --git a/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index b2dbc1022cf..60d3cfc71b6 100644 --- a/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -18,8 +18,14 @@ */ package org.elasticsearch.index.shard; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitDocIdSetFilter; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.engine.Engine; @@ -122,7 +128,7 @@ public class TranslogRecoveryPerformer { break; case DELETE_BY_QUERY: Translog.DeleteByQuery deleteByQuery = (Translog.DeleteByQuery) operation; - engine.delete(IndexShard.prepareDeleteByQuery(queryParserService, mapperService, indexAliasesService, indexCache, + engine.delete(prepareDeleteByQuery(queryParserService, mapperService, indexAliasesService, indexCache, deleteByQuery.source(), deleteByQuery.filteringAliases(), Engine.Operation.Origin.RECOVERY, deleteByQuery.types())); break; default: @@ -149,6 +155,22 @@ public class TranslogRecoveryPerformer { operationProcessed(); } + private static Engine.DeleteByQuery prepareDeleteByQuery(IndexQueryParserService queryParserService, MapperService 
mapperService, IndexAliasesService indexAliasesService, IndexCache indexCache, BytesReference source, @Nullable String[] filteringAliases, Engine.Operation.Origin origin, String... types) { + long startTime = System.nanoTime(); + if (types == null) { + types = Strings.EMPTY_ARRAY; + } + Query query = queryParserService.parseQuery(source).query(); + Query searchFilter = mapperService.searchFilter(types); + if (searchFilter != null) { + query = Queries.filtered(query, searchFilter); + } + + Query aliasFilter = indexAliasesService.aliasFilter(filteringAliases); + BitDocIdSetFilter parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()) : null; + return new Engine.DeleteByQuery(query, source, filteringAliases, aliasFilter, parentFilter, origin, startTime, types); + } + /** * Called once for every processed operation by this recovery performer. * This can be used to get progress information on the translog execution. From 01e8eaf181148d1ae5239800b5d71bd38c0c2896 Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 3 Jun 2015 18:07:47 +0200 Subject: [PATCH 06/16] Date Parsing: Add parsing for epoch and epoch in milliseconds This commit changes the date handling. First and foremost Elasticsearch does not try to convert every date to a unix timestamp first and then uses the configured date. This now allows for dates like `2015121212` to be parsed correctly. Instead it is now explicit by adding a `epoch_second` and `epoch_millis` date format. This also means, that the default date format now is `epoch_millis||dateOptionalTime` to remain backwards compatible. 
Closes #5328 Relates #10971 --- docs/reference/mapping/date-format.asciidoc | 5 ++ .../mapping/fields/timestamp-field.asciidoc | 2 +- .../mapping/types/core-types.asciidoc | 2 +- .../mapping/types/root-object-type.asciidoc | 4 +- .../action/TimestampParsingException.java | 5 ++ .../cluster/metadata/MappingMetaData.java | 14 ++--- .../common/joda/DateMathParser.java | 22 ++------ .../org/elasticsearch/common/joda/Joda.java | 51 +++++++++++++++++++ .../index/mapper/core/DateFieldMapper.java | 35 ++++--------- .../mapper/internal/TimestampFieldMapper.java | 2 +- .../index/query/RangeQueryParser.java | 7 +-- .../support/format/ValueFormat.java | 2 +- .../support/format/ValueParser.java | 9 ++-- .../common/joda/DateMathParserTests.java | 27 ++++++---- .../count/simple/SimpleCountTests.java | 44 ++++++++++++++++ .../deps/joda/SimpleJodaTests.java | 35 +++++++++++++ .../mapper/date/SimpleDateMappingTests.java | 48 ++++++++++------- .../timestamp/TimestampMappingTests.java | 14 +++++ 18 files changed, 228 insertions(+), 100 deletions(-) diff --git a/docs/reference/mapping/date-format.asciidoc b/docs/reference/mapping/date-format.asciidoc index a548d282ee9..9f52f4e8789 100644 --- a/docs/reference/mapping/date-format.asciidoc +++ b/docs/reference/mapping/date-format.asciidoc @@ -198,6 +198,11 @@ year. |`year_month_day`|A formatter for a four digit year, two digit month of year, and two digit day of month. + +|`epoch_second`|A formatter for the number of seconds since the epoch. + +|`epoch_millis`|A formatter for the number of milliseconds since +the epoch. 
|======================================================================= [float] diff --git a/docs/reference/mapping/fields/timestamp-field.asciidoc b/docs/reference/mapping/fields/timestamp-field.asciidoc index ce7520708f8..0fb1a91d7f5 100644 --- a/docs/reference/mapping/fields/timestamp-field.asciidoc +++ b/docs/reference/mapping/fields/timestamp-field.asciidoc @@ -79,7 +79,7 @@ format>> used to parse the provided timestamp value. For example: } -------------------------------------------------- -Note, the default format is `dateOptionalTime`. The timestamp value will +Note, the default format is `epoch_millis||dateOptionalTime`. The timestamp value will first be parsed as a number and if it fails the format will be tried. [float] diff --git a/docs/reference/mapping/types/core-types.asciidoc b/docs/reference/mapping/types/core-types.asciidoc index 7d25d8d9ab1..43fcc07a0ca 100644 --- a/docs/reference/mapping/types/core-types.asciidoc +++ b/docs/reference/mapping/types/core-types.asciidoc @@ -349,7 +349,7 @@ date type: Defaults to the property/field name. |`format` |The <>. Defaults to `dateOptionalTime`. +format>>. Defaults to `epoch_millis||dateOptionalTime`. |`store` |Set to `true` to store actual field in the index, `false` to not store it. Defaults to `false` (note, the JSON document itself is stored, diff --git a/docs/reference/mapping/types/root-object-type.asciidoc b/docs/reference/mapping/types/root-object-type.asciidoc index a8e8ea000cd..a97f7f57635 100644 --- a/docs/reference/mapping/types/root-object-type.asciidoc +++ b/docs/reference/mapping/types/root-object-type.asciidoc @@ -42,8 +42,8 @@ and will use the matching format as its format attribute. The date format itself is explained <>. -The default formats are: `dateOptionalTime` (ISO) and -`yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z`. +The default formats are: `dateOptionalTime` (ISO), +`yyyy/MM/dd HH:mm:ss Z||yyyy/MM/dd Z` and `epoch_millis`. 
*Note:* `dynamic_date_formats` are used *only* for dynamically added date fields, not for `date` fields that you specify in your mapping. diff --git a/src/main/java/org/elasticsearch/action/TimestampParsingException.java b/src/main/java/org/elasticsearch/action/TimestampParsingException.java index 634a0bf34a9..d865483a6ac 100644 --- a/src/main/java/org/elasticsearch/action/TimestampParsingException.java +++ b/src/main/java/org/elasticsearch/action/TimestampParsingException.java @@ -32,6 +32,11 @@ public class TimestampParsingException extends ElasticsearchException { this.timestamp = timestamp; } + public TimestampParsingException(String timestamp, Throwable cause) { + super("failed to parse timestamp [" + timestamp + "]", cause); + this.timestamp = timestamp; + } + public String timestamp() { return timestamp; } diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 778effc226b..f3cde99079f 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -161,19 +161,11 @@ public class MappingMetaData extends AbstractDiffable { public static class Timestamp { public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException { - long ts; try { - // if we manage to parse it, its a millisecond timestamp, just return the string as is - ts = Long.parseLong(timestampAsString); - return timestampAsString; - } catch (NumberFormatException e) { - try { - ts = dateTimeFormatter.parser().parseMillis(timestampAsString); - } catch (RuntimeException e1) { - throw new TimestampParsingException(timestampAsString); - } + return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString)); + } catch (RuntimeException e) { + throw new TimestampParsingException(timestampAsString, e); } - return 
Long.toString(ts); } diff --git a/src/main/java/org/elasticsearch/common/joda/DateMathParser.java b/src/main/java/org/elasticsearch/common/joda/DateMathParser.java index 232630ac61a..7fa1947a501 100644 --- a/src/main/java/org/elasticsearch/common/joda/DateMathParser.java +++ b/src/main/java/org/elasticsearch/common/joda/DateMathParser.java @@ -19,14 +19,14 @@ package org.elasticsearch.common.joda; -import org.apache.commons.lang3.StringUtils; import org.elasticsearch.ElasticsearchParseException; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; import org.joda.time.format.DateTimeFormatter; import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; + +import static com.google.common.base.Preconditions.checkNotNull; /** * A parser for date/time formatted text with optional date math. @@ -38,13 +38,10 @@ import java.util.concurrent.TimeUnit; public class DateMathParser { private final FormatDateTimeFormatter dateTimeFormatter; - private final TimeUnit timeUnit; - public DateMathParser(FormatDateTimeFormatter dateTimeFormatter, TimeUnit timeUnit) { - if (dateTimeFormatter == null) throw new NullPointerException(); - if (timeUnit == null) throw new NullPointerException(); + public DateMathParser(FormatDateTimeFormatter dateTimeFormatter) { + checkNotNull(dateTimeFormatter); this.dateTimeFormatter = dateTimeFormatter; - this.timeUnit = timeUnit; } public long parse(String text, Callable now) { @@ -195,17 +192,6 @@ public class DateMathParser { } private long parseDateTime(String value, DateTimeZone timeZone) { - - // first check for timestamp - if (value.length() > 4 && StringUtils.isNumeric(value)) { - try { - long time = Long.parseLong(value); - return timeUnit.toMillis(time); - } catch (NumberFormatException e) { - throw new ElasticsearchParseException("failed to parse date field [" + value + "] as timestamp", e); - } - } - DateTimeFormatter parser = dateTimeFormatter.parser(); if (timeZone != null) { parser = 
parser.withZone(timeZone); diff --git a/src/main/java/org/elasticsearch/common/joda/Joda.java b/src/main/java/org/elasticsearch/common/joda/Joda.java index 06d6760727f..b00c1ebbd47 100644 --- a/src/main/java/org/elasticsearch/common/joda/Joda.java +++ b/src/main/java/org/elasticsearch/common/joda/Joda.java @@ -27,6 +27,7 @@ import org.joda.time.field.ScaledDurationField; import org.joda.time.format.*; import java.util.Locale; +import java.util.regex.Pattern; /** * @@ -133,6 +134,10 @@ public class Joda { formatter = ISODateTimeFormat.yearMonth(); } else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) { formatter = ISODateTimeFormat.yearMonthDay(); + } else if ("epoch_second".equals(input)) { + formatter = new DateTimeFormatterBuilder().append(new EpochTimeParser(false)).toFormatter(); + } else if ("epoch_millis".equals(input)) { + formatter = new DateTimeFormatterBuilder().append(new EpochTimeParser(true)).toFormatter(); } else if (Strings.hasLength(input) && input.contains("||")) { String[] formats = Strings.delimitedListToStringArray(input, "||"); DateTimeParser[] parsers = new DateTimeParser[formats.length]; @@ -192,4 +197,50 @@ public class Joda { return new OffsetDateTimeField(new DividedDateTimeField(new OffsetDateTimeField(chronology.monthOfYear(), -1), QuarterOfYear, 3), 1); } }; + + public static class EpochTimeParser implements DateTimeParser { + + private static final Pattern MILLI_SECOND_PRECISION_PATTERN = Pattern.compile("^\\d{1,13}$"); + private static final Pattern SECOND_PRECISION_PATTERN = Pattern.compile("^\\d{1,10}$"); + + private final boolean hasMilliSecondPrecision; + private final Pattern pattern; + + public EpochTimeParser(boolean hasMilliSecondPrecision) { + this.hasMilliSecondPrecision = hasMilliSecondPrecision; + this.pattern = hasMilliSecondPrecision ? MILLI_SECOND_PRECISION_PATTERN : SECOND_PRECISION_PATTERN; + } + + @Override + public int estimateParsedLength() { + return hasMilliSecondPrecision ? 
13 : 10; + } + + @Override + public int parseInto(DateTimeParserBucket bucket, String text, int position) { + if (text.length() > estimateParsedLength() || + // timestamps have to have UTC timezone + bucket.getZone() != DateTimeZone.UTC || + pattern.matcher(text).matches() == false) { + return -1; + } + + int factor = hasMilliSecondPrecision ? 1 : 1000; + try { + long millis = Long.valueOf(text) * factor; + DateTime dt = new DateTime(millis, DateTimeZone.UTC); + bucket.saveField(DateTimeFieldType.year(), dt.getYear()); + bucket.saveField(DateTimeFieldType.monthOfYear(), dt.getMonthOfYear()); + bucket.saveField(DateTimeFieldType.dayOfMonth(), dt.getDayOfMonth()); + bucket.saveField(DateTimeFieldType.hourOfDay(), dt.getHourOfDay()); + bucket.saveField(DateTimeFieldType.minuteOfHour(), dt.getMinuteOfHour()); + bucket.saveField(DateTimeFieldType.secondOfMinute(), dt.getSecondOfMinute()); + bucket.saveField(DateTimeFieldType.millisOfSecond(), dt.getMillisOfSecond()); + bucket.setZone(DateTimeZone.UTC); + } catch (Exception e) { + return -1; + } + return text.length(); + } + }; } diff --git a/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java index b33182d8b17..510e068464d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/DateFieldMapper.java @@ -46,12 +46,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericDateAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeMappingException; -import org.elasticsearch.index.mapper.MergeResult; -import 
org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.core.LongFieldMapper.CustomLongNumericField; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.internal.SearchContext; @@ -223,7 +218,7 @@ public class DateFieldMapper extends NumberFieldMapper { protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; protected TimeUnit timeUnit = Defaults.TIME_UNIT; - protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); + protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); public DateFieldType() {} @@ -245,7 +240,7 @@ public class DateFieldMapper extends NumberFieldMapper { public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { checkIfFrozen(); this.dateTimeFormatter = dateTimeFormatter; - this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); + this.dateMathParser = new DateMathParser(dateTimeFormatter); } public TimeUnit timeUnit() { @@ -255,7 +250,7 @@ public class DateFieldMapper extends NumberFieldMapper { public void setTimeUnit(TimeUnit timeUnit) { checkIfFrozen(); this.timeUnit = timeUnit; - this.dateMathParser = new DateMathParser(dateTimeFormatter, timeUnit); + this.dateMathParser = new DateMathParser(dateTimeFormatter); } protected DateMathParser dateMathParser() { @@ -365,9 +360,6 @@ public class DateFieldMapper extends NumberFieldMapper { } public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } DateMathParser dateParser = dateMathParser(); if (forcedDateParser != null) { dateParser = forcedDateParser; @@ -434,17 +426,12 @@ public class DateFieldMapper extends NumberFieldMapper { @Override protected void innerParseCreateField(ParseContext context, List fields) throws 
IOException { String dateAsString = null; - Long value = null; float boost = this.fieldType.boost(); if (context.externalValueSet()) { Object externalValue = context.externalValue(); - if (externalValue instanceof Number) { - value = ((Number) externalValue).longValue(); - } else { - dateAsString = (String) externalValue; - if (dateAsString == null) { - dateAsString = nullValue; - } + dateAsString = (String) externalValue; + if (dateAsString == null) { + dateAsString = nullValue; } } else { XContentParser parser = context.parser(); @@ -452,7 +439,7 @@ public class DateFieldMapper extends NumberFieldMapper { if (token == XContentParser.Token.VALUE_NULL) { dateAsString = nullValue; } else if (token == XContentParser.Token.VALUE_NUMBER) { - value = parser.longValue(coerce.value()); + dateAsString = parser.text(); } else if (token == XContentParser.Token.START_OBJECT) { String currentFieldName = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { @@ -462,8 +449,6 @@ public class DateFieldMapper extends NumberFieldMapper { if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { if (token == XContentParser.Token.VALUE_NULL) { dateAsString = nullValue; - } else if (token == XContentParser.Token.VALUE_NUMBER) { - value = parser.longValue(coerce.value()); } else { dateAsString = parser.text(); } @@ -479,14 +464,12 @@ public class DateFieldMapper extends NumberFieldMapper { } } + Long value = null; if (dateAsString != null) { - assert value == null; if (context.includeInAll(includeInAll, this)) { context.allEntries().addText(fieldType.names().fullName(), dateAsString, boost); } value = fieldType().parseStringValue(dateAsString); - } else if (value != null) { - value = ((DateFieldType)fieldType).timeUnit().toMillis(value); } if (value != null) { diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index 
e517a1f801c..149cac7f1aa 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -58,7 +58,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper public static final String NAME = "_timestamp"; public static final String CONTENT_TYPE = "_timestamp"; - public static final String DEFAULT_DATE_TIME_FORMAT = "dateOptionalTime"; + public static final String DEFAULT_DATE_TIME_FORMAT = "epoch_millis||dateOptionalTime"; public static class Defaults extends DateFieldMapper.Defaults { public static final String NAME = "_timestamp"; diff --git a/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java b/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java index fed95b9eb02..a4e04856500 100644 --- a/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java +++ b/src/main/java/org/elasticsearch/index/query/RangeQueryParser.java @@ -102,7 +102,7 @@ public class RangeQueryParser implements QueryParser { } else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) { timeZone = DateTimeZone.forID(parser.text()); } else if ("format".equals(currentFieldName)) { - forcedDateParser = new DateMathParser(Joda.forPattern(parser.text()), DateFieldMapper.Defaults.TIME_UNIT); + forcedDateParser = new DateMathParser(Joda.forPattern(parser.text())); } else { throw new QueryParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]"); } @@ -123,11 +123,6 @@ public class RangeQueryParser implements QueryParser { FieldMapper mapper = parseContext.fieldMapper(fieldName); if (mapper != null) { if (mapper instanceof DateFieldMapper) { - if ((from instanceof Number || to instanceof Number) && timeZone != null) { - throw new QueryParsingException(parseContext, - "[range] time_zone when using ms since epoch format as it's UTC based can not be applied to [" + 
fieldName - + "]"); - } query = ((DateFieldMapper) mapper).fieldType().rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser, parseContext); } else { if (timeZone != null) { diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java index 7efef92364c..696137059d5 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueFormat.java @@ -68,7 +68,7 @@ public class ValueFormat { public static final DateTime DEFAULT = new DateTime(DateFieldMapper.Defaults.DATE_TIME_FORMATTER.format(), ValueFormatter.DateTime.DEFAULT, ValueParser.DateMath.DEFAULT); public static DateTime format(String format) { - return new DateTime(format, new ValueFormatter.DateTime(format), new ValueParser.DateMath(format, DateFieldMapper.Defaults.TIME_UNIT)); + return new DateTime(format, new ValueFormatter.DateTime(format), new ValueParser.DateMath(format)); } public static DateTime mapper(DateFieldMapper mapper) { diff --git a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java index ccc57c34288..296b945632a 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java +++ b/src/main/java/org/elasticsearch/search/aggregations/support/format/ValueParser.java @@ -32,7 +32,6 @@ import java.text.NumberFormat; import java.text.ParseException; import java.util.Locale; import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; /** * @@ -81,12 +80,12 @@ public interface ValueParser { */ static class DateMath implements ValueParser { - public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER, 
DateFieldMapper.Defaults.TIME_UNIT)); + public static final DateMath DEFAULT = new ValueParser.DateMath(new DateMathParser(DateFieldMapper.Defaults.DATE_TIME_FORMATTER)); private DateMathParser parser; - public DateMath(String format, TimeUnit timeUnit) { - this(new DateMathParser(Joda.forPattern(format), timeUnit)); + public DateMath(String format) { + this(new DateMathParser(Joda.forPattern(format))); } public DateMath(DateMathParser parser) { @@ -110,7 +109,7 @@ public interface ValueParser { } public static DateMath mapper(DateFieldMapper mapper) { - return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter(), DateFieldMapper.Defaults.TIME_UNIT)); + return new DateMath(new DateMathParser(mapper.fieldType().dateTimeFormatter())); } } diff --git a/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java b/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java index cba7a259f41..359e418bb6a 100644 --- a/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java +++ b/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java @@ -23,16 +23,18 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.test.ElasticsearchTestCase; import org.joda.time.DateTimeZone; +import org.junit.Test; +import java.util.TimeZone; import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; public class DateMathParserTests extends ElasticsearchTestCase { - FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime"); - DateMathParser parser = new DateMathParser(formatter, TimeUnit.MILLISECONDS); + + FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis"); + DateMathParser parser = new DateMathParser(formatter); private static Callable callable(final long value) { return new Callable() { @@ -195,25 +197,22 
@@ public class DateMathParserTests extends ElasticsearchTestCase { public void testTimestamps() { assertDateMathEquals("1418248078000", "2014-12-10T21:47:58.000"); - // timezone does not affect timestamps - assertDateMathEquals("1418248078000", "2014-12-10T21:47:58.000", 0, false, DateTimeZone.forID("-08:00")); - // datemath still works on timestamps assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - DateMathParser parser = new DateMathParser(Joda.forPattern("dateOptionalTime"), TimeUnit.SECONDS); + DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_second||dateOptionalTime")); long datetime = parser.parse("1418248078", callable(0)); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); // a timestamp before 10000 is a year assertDateMathEquals("9999", "9999-01-01T00:00:00.000"); - // 10000 is the first timestamp - assertDateMathEquals("10000", "1970-01-01T00:00:10.000"); + // 10000 is also a year, breaking bwc, used to be a timestamp + assertDateMathEquals("10000", "10000-01-01T00:00:00.000"); // but 10000 with T is still a date format assertDateMathEquals("10000T", "10000-01-01T00:00:00.000"); } - + void assertParseException(String msg, String date, String exc) { try { parser.parse(date, callable(0)); @@ -232,7 +231,7 @@ public class DateMathParserTests extends ElasticsearchTestCase { } public void testIllegalDateFormat() { - assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "timestamp"); + assertParseException("Expected bad timestamp exception", Long.toString(Long.MAX_VALUE) + "0", "failed to parse date field"); assertParseException("Expected bad date format exception", "123bogus", "with format"); } @@ -250,4 +249,10 @@ public class DateMathParserTests extends ElasticsearchTestCase { parser.parse("now/d", now, false, null); assertTrue(called.get()); } + + @Test(expected = ElasticsearchParseException.class) + public void 
testThatUnixTimestampMayNotHaveTimeZone() { + DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_millis")); + parser.parse("1234567890123", callable(42), false, DateTimeZone.forTimeZone(TimeZone.getTimeZone("CET"))); + } } diff --git a/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java b/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java index 074d226f4f1..8d613fb6acf 100644 --- a/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java +++ b/src/test/java/org/elasticsearch/count/simple/SimpleCountTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.count.simple; import org.apache.lucene.util.Constants; import org.elasticsearch.action.count.CountResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ElasticsearchIntegrationTest; @@ -39,6 +40,7 @@ import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.is; public class SimpleCountTests extends ElasticsearchIntegrationTest { @@ -177,4 +179,46 @@ public class SimpleCountTests extends ElasticsearchIntegrationTest { assertHitCount(countResponse, 20l); } } + + @Test + public void testThatNonEpochDatesCanBeSearch() throws Exception { + assertAcked(prepareCreate("test") + .addMapping("type1", + jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject() + .endObject().endObject())); + ensureGreen("test"); + + XContentBuilder document = jsonBuilder() + 
.startObject() + .field("date_field", "2015060210") + .endObject(); + assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true)); + + document = jsonBuilder() + .startObject() + .field("date_field", "2014060210") + .endObject(); + assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true)); + + // this is a timestamp in 2015 and should not be returned in counting when filtering by year + document = jsonBuilder() + .startObject() + .field("date_field", "1433236702") + .endObject(); + assertThat(client().prepareIndex("test", "type1").setSource(document).get().isCreated(), is(true)); + + refresh(); + + assertHitCount(client().prepareCount("test").get(), 3); + + CountResponse countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from("2015010100").to("2015123123")).get(); + assertHitCount(countResponse, 1); + + countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from(2015010100).to(2015123123)).get(); + assertHitCount(countResponse, 1); + + countResponse = client().prepareCount("test").setQuery(QueryBuilders.rangeQuery("date_field").from(2015010100).to(2015123123).timeZone("UTC")).get(); + assertHitCount(countResponse, 1); + } } diff --git a/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java b/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java index c306b5546b3..c35953ca210 100644 --- a/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java +++ b/src/test/java/org/elasticsearch/deps/joda/SimpleJodaTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ElasticsearchTestCase; +import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; import org.joda.time.format.*; @@ -248,6 +249,40 
@@ public class SimpleJodaTests extends ElasticsearchTestCase { assertThat(time.getMillis(), equalTo(utcTime.getMillis() - TimeValue.timeValueHours(22).millis())); } + @Test + public void testThatEpochsInSecondsCanBeParsed() { + boolean parseMilliSeconds = randomBoolean(); + + // epoch: 1433144433655 => date: Mon Jun 1 09:40:33.655 CEST 2015 + FormatDateTimeFormatter formatter = Joda.forPattern(parseMilliSeconds ? "epoch_millis" : "epoch_second"); + DateTime dateTime = formatter.parser().parseDateTime(parseMilliSeconds ? "1433144433655" : "1433144433"); + + assertThat(dateTime.getYear(), is(2015)); + assertThat(dateTime.getDayOfMonth(), is(1)); + assertThat(dateTime.getMonthOfYear(), is(6)); + assertThat(dateTime.getHourOfDay(), is(7)); // utc timezone, +2 offset due to CEST + assertThat(dateTime.getMinuteOfHour(), is(40)); + assertThat(dateTime.getSecondOfMinute(), is(33)); + + if (parseMilliSeconds) { + assertThat(dateTime.getMillisOfSecond(), is(655)); + } else { + assertThat(dateTime.getMillisOfSecond(), is(0)); + } + } + + @Test(expected = IllegalArgumentException.class) + public void testForInvalidDatesInEpochSecond() { + FormatDateTimeFormatter formatter = Joda.forPattern("epoch_second"); + formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901", "12345678901234")); + } + + @Test(expected = IllegalArgumentException.class) + public void testForInvalidDatesInEpochMillis() { + FormatDateTimeFormatter formatter = Joda.forPattern("epoch_millis"); + formatter.parser().parseDateTime(randomFrom("invalid date", "12345678901234")); + } + private long utcTimeInMillis(String time) { return ISODateTimeFormat.dateOptionalTimeParser().withZone(DateTimeZone.UTC).parseMillis(time); } diff --git a/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index 859bc4ebf0b..86d9fc322e7 100644 --- 
a/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.util.Constants; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.LocaleUtils; @@ -33,13 +34,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MergeResult; -import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.DateFieldMapper; import org.elasticsearch.index.mapper.core.LongFieldMapper; import org.elasticsearch.index.mapper.core.StringFieldMapper; @@ -51,21 +47,12 @@ import org.joda.time.DateTimeZone; import org.junit.Before; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Locale; -import java.util.Map; +import java.util.*; import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.index.mapper.string.SimpleStringMappingTests.docValuesType; -import static org.hamcrest.Matchers.equalTo; -import static 
org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.*; public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { @@ -439,4 +426,31 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest { .bytes()); assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(44000L)); } + + public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper defaultMapper = mapper("type", mapping); + + XContentBuilder document = XContentFactory.jsonBuilder() + .startObject() + .field("date_field", "2015060210") + .endObject(); + ParsedDocument doc = defaultMapper.parse("type", "1", document.bytes()); + assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L)); + IndexResponse indexResponse = client().prepareIndex("test", "test").setSource(document).get(); + assertThat(indexResponse.isCreated(), is(true)); + + // integers should always be parsed as well... 
cannot be sure it is a unix timestamp only + doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("date_field", 2015060210) + .endObject() + .bytes()); + assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L)); + indexResponse = client().prepareIndex("test", "test").setSource(document).get(); + assertThat(indexResponse.isCreated(), is(true)); + } } diff --git a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 4a127154c54..8c65418892d 100644 --- a/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -775,4 +775,18 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest { assertEquals(MappingMetaData.Timestamp.parseStringTimestamp("1970", Joda.forPattern("YYYY")), request.timestamp()); assertNull(docMapper.parse("type", "1", doc.bytes()).rootDoc().get("_timestamp")); } + + public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").field("path", "custom_timestamp").endObject() + .endObject().endObject().string(); + DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + + XContentBuilder doc = XContentFactory.jsonBuilder().startObject().field("custom_timestamp", 2015060210).endObject(); + IndexRequest request = new IndexRequest("test", "type", "1").source(doc); + MappingMetaData mappingMetaData = new MappingMetaData(docMapper); + request.process(MetaData.builder().build(), mappingMetaData, true, "test"); + + assertThat(request.timestamp(), is("1433239200000")); + } } From b3779a03fcee69ceb794c8d4b88331fe77d49f4c Mon Sep 
17 00:00:00 2001 From: Alexander Reelsen Date: Wed, 3 Jun 2015 19:01:29 +0200 Subject: [PATCH 07/16] Test: Fix search query tests to use correct date, mark one test as awaitsfix --- .../org/elasticsearch/search/query/SearchQueryTests.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/test/java/org/elasticsearch/search/query/SearchQueryTests.java b/src/test/java/org/elasticsearch/search/query/SearchQueryTests.java index 61cbcec2aea..45e866717aa 100644 --- a/src/test/java/org/elasticsearch/search/query/SearchQueryTests.java +++ b/src/test/java/org/elasticsearch/search/query/SearchQueryTests.java @@ -1674,7 +1674,7 @@ public class SearchQueryTests extends ElasticsearchIntegrationTest { .startObject("ts") .field("type", "date") .field("ignore_malformed", false) - .field("format", "dateOptionalTime") + .field("format", "epoch_millis") .endObject() .startObject("bs") .field("type", "string") @@ -1683,7 +1683,7 @@ public class SearchQueryTests extends ElasticsearchIntegrationTest { .endObject() .endObject() .endObject()) - .addMapping("bs", "online", "type=boolean", "ts", "type=date,ignore_malformed=false,format=dateOptionalTime")); + .addMapping("bs", "online", "type=boolean", "ts", "type=date,ignore_malformed=false,format=epoch_millis")); ensureGreen(); client().prepareIndex("test", "s", "1").setRouting("Y").setSource("online", false, "bs", "Y", "ts", System.currentTimeMillis() - 100).get(); @@ -2195,6 +2195,7 @@ functionScoreQuery(scriptFunction(new Script("_doc['score'].value")))).setMinSco } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/11478") @Test public void testDateProvidedAsNumber() throws ExecutionException, InterruptedException { createIndex("test"); From 7264fd4eb810c514e80ee7b5e5dfd1c8f95e5d26 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 3 Jun 2015 14:48:30 +0200 Subject: [PATCH 08/16] Fix possible BWC break after upgrading from pre 1.0.0 This is happening because of #4074 when we required 
that the top-level "query" is present to delete-by-query requests, but prior to that we required that it is not present. So the translog has a DBQ without "query" and when we try to parse it we hit this exception. This commit adds special handling for pre 1.0.0 indices if we hit parse exception, we try to reparse without a top-level query object to be BWC compatible for these indices. Closes #10262 Conflicts: src/main/java/org/elasticsearch/index/shard/IndexShard.java src/test/java/org/elasticsearch/index/shard/IndexShardTests.java --- .../shard/TranslogRecoveryPerformer.java | 24 +++++++++++++- .../index/shard/IndexShardTests.java | 33 +++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index 60d3cfc71b6..bbe613b61c9 100644 --- a/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -21,11 +21,14 @@ package org.elasticsearch.index.shard; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.BitDocIdSetFilter; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.engine.Engine; @@ -37,6 +40,8 @@ import org.elasticsearch.index.mapper.MapperUtils; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.Uid; import 
org.elasticsearch.index.query.IndexQueryParserService; +import org.elasticsearch.index.query.ParsedQuery; +import org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.index.translog.Translog; import java.util.HashMap; @@ -160,7 +165,24 @@ public class TranslogRecoveryPerformer { if (types == null) { types = Strings.EMPTY_ARRAY; } - Query query = queryParserService.parseQuery(source).query(); + Query query; + try { + query = queryParserService.parseQuery(source).query(); + } catch (QueryParsingException ex) { + // for BWC we try to parse directly the query since pre 1.0.0.Beta2 we didn't require a top level query field + if ( queryParserService.getIndexCreatedVersion().onOrBefore(Version.V_1_0_0_Beta2)) { + try { + XContentParser parser = XContentHelper.createParser(source); + ParsedQuery parse = queryParserService.parse(parser); + query = parse.query(); + } catch (Throwable t) { + ex.addSuppressed(t); + throw ex; + } + } else { + throw ex; + } + } Query searchFilter = mapperService.searchFilter(types); if (searchFilter != null) { query = Queries.filtered(query, searchFilter); diff --git a/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 97e988f819f..da7f17ce48d 100644 --- a/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -19,22 +19,26 @@ package org.elasticsearch.index.shard; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.MutableShardRouting; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.logging.ESLogger; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.query.QueryParsingException; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ElasticsearchSingleNodeTest; +import org.elasticsearch.test.VersionUtils; import org.junit.Test; import java.io.IOException; @@ -329,4 +333,33 @@ public class IndexShardTests extends ElasticsearchSingleNodeTest { client().admin().indices().prepareUpdateSettings(shard.shardId.getIndex()).setSettings(settingsBuilder().put(TranslogConfig.INDEX_TRANSLOG_DURABILITY, durabilty.name()).build()).get(); assertEquals(durabilty, shard.getTranslogDurability()); } + + public void testDeleteByQueryBWC() { + Version version = VersionUtils.randomVersion(random()); + assertAcked(client().admin().indices().prepareCreate("test") + .setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0, IndexMetaData.SETTING_VERSION_CREATED, version.id)); + ensureGreen("test"); + client().prepareIndex("test", "person").setSource("{ \"user\" : \"kimchy\" }").get(); + + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService("test"); + IndexShard shard = test.shard(0); + int numDocs = 1; + shard.state = IndexShardState.RECOVERING; + try { + shard.recoveryState().getTranslog().totalOperations(1); + shard.engine().config().getTranslogRecoveryPerformer().performRecoveryOperation(shard.engine(), new Translog.DeleteByQuery(new Engine.DeleteByQuery(null, new BytesArray("{\"term\" : { \"user\" : \"kimchy\" }}"), null, null, null, Engine.Operation.Origin.RECOVERY, 0, "person"))); + 
assertTrue(version.onOrBefore(Version.V_1_0_0_Beta2)); + numDocs = 0; + } catch (QueryParsingException ex) { + assertTrue(version.after(Version.V_1_0_0_Beta2)); + } finally { + shard.state = IndexShardState.STARTED; + } + shard.engine().refresh("foo"); + + try (Engine.Searcher searcher = shard.engine().acquireSearcher("foo")) { + assertEquals(numDocs, searcher.reader().numDocs()); + } + } } From d621b160e9b940add546a6234bc2d17381334f33 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Wed, 3 Jun 2015 15:45:07 +0200 Subject: [PATCH 09/16] Use the smallest version rather than the default version The minimum version comparison was always using the default version since the comparison was flipped. Closes #11474 --- .../elasticsearch/index/shard/IndexShard.java | 11 +++-------- .../index/shard/IndexShardTests.java | 16 ++++++++++++++++ .../upgrade/UpgradeReallyOldIndexTest.java | 17 +++++++++++++++++ 3 files changed, 36 insertions(+), 8 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/src/main/java/org/elasticsearch/index/shard/IndexShard.java index dbf1af95ade..75b322f45a6 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -23,8 +23,6 @@ import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.index.CheckIndex; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.ThreadInterruptedException; @@ -41,14 +39,11 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.Strings; -import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.metrics.MeanMetric; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -720,13 +715,13 @@ public class IndexShard extends AbstractIndexShardComponent { } public org.apache.lucene.util.Version minimumCompatibleVersion() { - org.apache.lucene.util.Version luceneVersion = Version.LUCENE_3_EMULATION_VERSION; + org.apache.lucene.util.Version luceneVersion = null; for(Segment segment : engine().segments(false)) { - if (luceneVersion.onOrAfter(segment.getVersion())) { + if (luceneVersion == null || luceneVersion.onOrAfter(segment.getVersion())) { luceneVersion = segment.getVersion(); } } - return luceneVersion; + return luceneVersion == null ? 
Version.indexCreated(indexSettings).luceneVersion : luceneVersion; } public SnapshotIndexCommit snapshotIndex(boolean flushFirst) throws EngineException { diff --git a/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index da7f17ce48d..3e7fcc79d55 100644 --- a/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -50,6 +50,7 @@ import java.util.concurrent.ExecutionException; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.settings.Settings.settingsBuilder; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; @@ -362,4 +363,19 @@ public class IndexShardTests extends ElasticsearchSingleNodeTest { assertEquals(numDocs, searcher.reader().numDocs()); } } + + public void testMinimumCompatVersion() { + Version versionCreated = VersionUtils.randomVersion(random()); + assertAcked(client().admin().indices().prepareCreate("test") + .setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0, SETTING_VERSION_CREATED, versionCreated.id)); + client().prepareIndex("test", "test").setSource("{}").get(); + ensureGreen("test"); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexShard test = indicesService.indexService("test").shard(0); + assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion()); + client().prepareIndex("test", "test").setSource("{}").get(); + assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion()); + 
test.engine().flush(); + assertEquals(Version.CURRENT.luceneVersion, test.minimumCompatibleVersion()); + } } diff --git a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java index ce2d54bb30f..3fd6bf4c5fb 100644 --- a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java +++ b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeReallyOldIndexTest.java @@ -19,7 +19,11 @@ package org.elasticsearch.rest.action.admin.indices.upgrade; +import org.elasticsearch.Version; import org.elasticsearch.bwcompat.StaticIndexBackwardCompatibilityTest; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.indices.IndicesService; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -29,6 +33,7 @@ public class UpgradeReallyOldIndexTest extends StaticIndexBackwardCompatibilityT String indexName = "index-0.90.6"; loadIndex(indexName); + assertMinVersion(indexName, org.apache.lucene.util.Version.parse("4.5.1")); UpgradeTest.assertNotUpgraded(client(), indexName); assertTrue(UpgradeTest.hasAncientSegments(client(), indexName)); assertNoFailures(client().admin().indices().prepareUpgrade(indexName).setUpgradeOnlyAncientSegments(true).get()); @@ -36,6 +41,18 @@ public class UpgradeReallyOldIndexTest extends StaticIndexBackwardCompatibilityT assertFalse(UpgradeTest.hasAncientSegments(client(), "index-0.90.6")); // This index has only ancient segments, so it should now be fully upgraded: UpgradeTest.assertUpgraded(client(), indexName); + assertEquals(Version.CURRENT.luceneVersion.toString(), client().admin().indices().prepareGetSettings(indexName).get().getSetting(indexName, IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE)); + assertMinVersion(indexName, 
Version.CURRENT.luceneVersion); + } + + private void assertMinVersion(String index, org.apache.lucene.util.Version version) { + for (IndicesService services : internalCluster().getInstances(IndicesService.class)) { + IndexService indexService = services.indexService(index); + if (indexService != null) { + assertEquals(version, indexService.shard(0).minimumCompatibleVersion()); + } + } + } } From f0e6addc53bdd82dd55bab96f024012a7ba8456c Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Mon, 1 Jun 2015 13:54:31 -1000 Subject: [PATCH 10/16] Snapshot/Restore: sync up snapshot shard status on a master restart When a snapshot operation on a particular shard finishes, the data node where this shard resides sends an update shard status request to the master node to indicate that the operation on the shard is done. When the master node receives the command it queues cluster state update task and acknowledges the receipt of the command to the data node. The update snapshot shard status tasks have relatively low priority, so during cluster instability they tend to get stuck at the end of the queue. If the master node gets restarted before processing these tasks the information about the shards can be lost and the new master assumes that they are still in process while the data node thinks that these shards are already done. This commit adds a retry mechanism that compares cluster state of a newly elected master and the current state of snapshot shards and updates the cluster state on the master again if needed. 
Closes #11314 --- .../snapshots/SnapshotsService.java | 65 ++++++++- .../snapshots/AbstractSnapshotTests.java | 131 ++++++++++++++++++ .../DedicatedClusterSnapshotRestoreTests.java | 80 +++++++++++ .../SharedClusterSnapshotRestoreTests.java | 122 ---------------- 4 files changed, 271 insertions(+), 127 deletions(-) diff --git a/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index cdf068cecee..ae22fbfa055 100644 --- a/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -549,13 +549,18 @@ public class SnapshotsService extends AbstractLifecycleComponent survivors = newHashMap(); // First, remove snapshots that are no longer there for (Map.Entry entry : shardSnapshots.entrySet()) { @@ -830,7 +836,17 @@ public class SnapshotsService extends AbstractLifecycleComponent shard : entry.shards().entrySet()) { IndexShardSnapshotStatus snapshotStatus = snapshotShards.shards.get(shard.getKey()); if (snapshotStatus != null) { - snapshotStatus.abort(); + if (snapshotStatus.stage() == IndexShardSnapshotStatus.Stage.STARTED) { + snapshotStatus.abort(); + } else if (snapshotStatus.stage() == IndexShardSnapshotStatus.Stage.DONE) { + logger.debug("[{}] trying to cancel snapshot on the shard [{}] that is already done, updating status on the master", entry.snapshotId(), shard.getKey()); + updateIndexShardSnapshotStatus(new UpdateIndexShardSnapshotStatusRequest(entry.snapshotId(), shard.getKey(), + new ShardSnapshotStatus(event.state().nodes().localNodeId(), SnapshotMetaData.State.SUCCESS))); + } else if (snapshotStatus.stage() == IndexShardSnapshotStatus.Stage.FAILURE) { + logger.debug("[{}] trying to cancel snapshot on the shard [{}] that has already failed, updating status on the master", entry.snapshotId(), shard.getKey()); + updateIndexShardSnapshotStatus(new UpdateIndexShardSnapshotStatusRequest(entry.snapshotId(), 
shard.getKey(), + new ShardSnapshotStatus(event.state().nodes().localNodeId(), State.FAILED, snapshotStatus.failure()))); + } } } } @@ -878,6 +894,45 @@ public class SnapshotsService extends AbstractLifecycleComponent localShards = currentSnapshotShards(snapshot.snapshotId()); + if (localShards != null) { + ImmutableMap masterShards = snapshot.shards(); + for(Map.Entry localShard : localShards.entrySet()) { + ShardId shardId = localShard.getKey(); + IndexShardSnapshotStatus localShardStatus = localShard.getValue(); + ShardSnapshotStatus masterShard = masterShards.get(shardId); + if (masterShard != null && masterShard.state().completed() == false) { + // Master knows about the shard and thinks it has not completed + if (localShardStatus.stage() == IndexShardSnapshotStatus.Stage.DONE) { + // but we think the shard is done - we need to make new master know that the shard is done + logger.debug("[{}] new master thinks the shard [{}] is not completed but the shard is done locally, updating status on the master", snapshot.snapshotId(), shardId); + updateIndexShardSnapshotStatus(new UpdateIndexShardSnapshotStatusRequest(snapshot.snapshotId(), shardId, + new ShardSnapshotStatus(event.state().nodes().localNodeId(), SnapshotMetaData.State.SUCCESS))); + } else if (localShard.getValue().stage() == IndexShardSnapshotStatus.Stage.FAILURE) { + // but we think the shard failed - we need to make new master know that the shard failed + logger.debug("[{}] new master thinks the shard [{}] is not completed but the shard failed locally, updating status on master", snapshot.snapshotId(), shardId); + updateIndexShardSnapshotStatus(new UpdateIndexShardSnapshotStatusRequest(snapshot.snapshotId(), shardId, + new ShardSnapshotStatus(event.state().nodes().localNodeId(), State.FAILED, localShardStatus.failure()))); + + } + } + } + } + } + } + } + /** * Updates the shard status * diff --git a/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotTests.java 
b/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotTests.java index 04ff667b3f3..024065a1d2f 100644 --- a/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotTests.java +++ b/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotTests.java @@ -22,8 +22,12 @@ import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; +import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.metadata.SnapshotId; import org.elasticsearch.cluster.metadata.SnapshotMetaData; +import org.elasticsearch.cluster.service.PendingClusterTask; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.repositories.RepositoriesService; @@ -37,9 +41,12 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; /** */ @@ -121,4 +128,128 @@ public abstract class AbstractSnapshotTests extends ElasticsearchIntegrationTest public static void unblockNode(String node) { ((MockRepository)internalCluster().getInstance(RepositoriesService.class, node).repository("test-repo")).unblock(); } + + protected void assertBusyPendingTasks(final String taskPrefix, final int expectedCount) throws Exception { + assertBusy(new Runnable() { + @Override + public void run() { + PendingClusterTasksResponse tasks = client().admin().cluster().preparePendingClusterTasks().get(); + int count = 0; + for(PendingClusterTask task : tasks) { + if 
(task.getSource().toString().startsWith(taskPrefix)) { + count++; + } + } + assertThat(count, greaterThanOrEqualTo(expectedCount)); + } + }, 1, TimeUnit.MINUTES); + } + + /** + * Cluster state task that blocks waits for the blockOn task to show up and then blocks execution not letting + * any cluster state update task to be performed unless they have priority higher then passThroughPriority. + * + * This class is useful to testing of cluster state update task batching for lower priority tasks. + */ + protected class BlockingClusterStateListener implements ClusterStateListener { + + private final Predicate blockOn; + private final Predicate countOn; + private final ClusterService clusterService; + private final CountDownLatch latch; + private final Priority passThroughPriority; + private int count; + private boolean timedOut; + private final TimeValue timeout; + private long stopWaitingAt = -1; + + public BlockingClusterStateListener(ClusterService clusterService, String blockOn, String countOn, Priority passThroughPriority) { + this(clusterService, blockOn, countOn, passThroughPriority, TimeValue.timeValueMinutes(1)); + } + + public BlockingClusterStateListener(ClusterService clusterService, final String blockOn, final String countOn, Priority passThroughPriority, TimeValue timeout) { + this.clusterService = clusterService; + this.blockOn = new Predicate() { + @Override + public boolean apply(ClusterChangedEvent clusterChangedEvent) { + return clusterChangedEvent.source().startsWith(blockOn); + } + }; + this.countOn = new Predicate() { + @Override + public boolean apply(ClusterChangedEvent clusterChangedEvent) { + return clusterChangedEvent.source().startsWith(countOn); + } + }; + this.latch = new CountDownLatch(1); + this.passThroughPriority = passThroughPriority; + this.timeout = timeout; + + } + + public void unblock() { + latch.countDown(); + } + + @Override + public void clusterChanged(ClusterChangedEvent event) { + if (blockOn.apply(event)) { + 
logger.info("blocking cluster state tasks on [{}]", event.source()); + assert stopWaitingAt < 0; // Make sure we are the first time here + stopWaitingAt = System.currentTimeMillis() + timeout.getMillis(); + addBlock(); + } + if (countOn.apply(event)) { + count++; + } + } + + private void addBlock() { + // We should block after this task - add blocking cluster state update task + clusterService.submitStateUpdateTask("test_block", passThroughPriority, new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + while(System.currentTimeMillis() < stopWaitingAt) { + for (PendingClusterTask task : clusterService.pendingTasks()) { + if (task.getSource().string().equals("test_block") == false && passThroughPriority.sameOrAfter(task.getPriority())) { + // There are other higher priority tasks in the queue and let them pass through and then set the block again + logger.info("passing through cluster state task {}", task.getSource()); + addBlock(); + return currentState; + } + } + try { + logger.info("waiting...."); + if (latch.await(Math.min(100, timeout.millis()), TimeUnit.MILLISECONDS)){ + // Done waiting - unblock + logger.info("unblocked"); + return currentState; + } + logger.info("done waiting...."); + } catch (InterruptedException ex) { + logger.info("interrupted...."); + Thread.currentThread().interrupt(); + return currentState; + } + } + timedOut = true; + return currentState; + } + + @Override + public void onFailure(String source, Throwable t) { + logger.warn("failed to execute [{}]", t, source); + } + }); + + } + + public int count() { + return count; + } + + public boolean timedOut() { + return timedOut; + } + } } diff --git a/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java b/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java index d36bd523154..9716d23a04f 100644 --- 
a/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java +++ b/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotR import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.action.admin.indices.recovery.ShardRecoveryResponse; +import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; @@ -41,6 +42,7 @@ import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData.Custom; +import org.elasticsearch.cluster.metadata.SnapshotMetaData; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.metadata.MetaDataIndexStateService; import org.elasticsearch.common.Nullable; @@ -64,6 +66,7 @@ import org.elasticsearch.rest.action.admin.cluster.state.RestClusterStateAction; import org.elasticsearch.snapshots.mockstore.MockRepositoryModule; import org.elasticsearch.snapshots.mockstore.MockRepositoryPlugin; import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.rest.FakeRestRequest; import org.junit.Ignore; import org.junit.Test; @@ -791,6 +794,83 @@ public class DedicatedClusterSnapshotRestoreTests extends AbstractSnapshotTests logger.info("--> done"); } + @Test + public void masterShutdownDuringSnapshotTest() throws Exception { + + Settings masterSettings = settingsBuilder().put("node.data", false).build(); + Settings dataSettings = 
settingsBuilder().put("node.master", false).build(); + + logger.info("--> starting two master nodes and two data nodes"); + internalCluster().startNode(masterSettings); + internalCluster().startNode(masterSettings); + internalCluster().startNode(dataSettings); + internalCluster().startNode(dataSettings); + + final Client client = client(); + + logger.info("--> creating repository"); + assertAcked(client.admin().cluster().preparePutRepository("test-repo") + .setType("fs").setSettings(Settings.settingsBuilder() + .put("location", randomRepoPath()) + .put("compress", randomBoolean()) + .put("chunk_size", randomIntBetween(100, 1000)))); + + assertAcked(prepareCreate("test-idx", 0, settingsBuilder().put("number_of_shards", between(1, 20)) + .put("number_of_replicas", 0))); + ensureGreen(); + + logger.info("--> indexing some data"); + final int numdocs = randomIntBetween(10, 100); + IndexRequestBuilder[] builders = new IndexRequestBuilder[numdocs]; + for (int i = 0; i < builders.length; i++) { + builders[i] = client().prepareIndex("test-idx", "type1", Integer.toString(i)).setSource("field1", "bar " + i); + } + indexRandom(true, builders); + flushAndRefresh(); + + final int numberOfShards = getNumShards("test-idx").numPrimaries; + logger.info("number of shards: {}", numberOfShards); + + final ClusterService clusterService = internalCluster().clusterService(internalCluster().getMasterName()); + BlockingClusterStateListener snapshotListener = new BlockingClusterStateListener(clusterService, "update_snapshot [", "update snapshot state", Priority.HIGH); + try { + clusterService.addFirst(snapshotListener); + logger.info("--> snapshot"); + dataNodeClient().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(false).setIndices("test-idx").get(); + + // Await until some updates are in pending state. 
+ assertBusyPendingTasks("update snapshot state", 1); + + logger.info("--> stopping master node"); + internalCluster().stopCurrentMasterNode(); + + logger.info("--> unblocking snapshot execution"); + snapshotListener.unblock(); + + logger.info("--> wait until the snapshot is done"); + + } finally { + clusterService.remove(snapshotListener); + } + + assertBusy(new Runnable() { + @Override + public void run() { + SnapshotsStatusResponse snapshotsStatusResponse = client().admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-snap").get(); + ImmutableList snapshotStatuses = snapshotsStatusResponse.getSnapshots(); + assertEquals(1, snapshotStatuses.size()); + assertTrue(snapshotStatuses.get(0).getState().completed()); + } + }); + + GetSnapshotsResponse snapshotsStatusResponse = client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get(); + SnapshotInfo snapshotInfo = snapshotsStatusResponse.getSnapshots().get(0); + assertEquals(SnapshotState.SUCCESS, snapshotInfo.state()); + assertEquals(snapshotInfo.totalShards(), snapshotInfo.successfulShards()); + assertEquals(0, snapshotInfo.failedShards()); + } + + private boolean snapshotIsDone(String repository, String snapshot) { try { SnapshotsStatusResponse snapshotsStatusResponse = client().admin().cluster().prepareSnapshotStatus(repository).setSnapshots(snapshot).get(); diff --git a/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java b/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java index aabebed72c6..d75e3d3a18b 100644 --- a/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java +++ b/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java @@ -1893,126 +1893,4 @@ public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests { // Check that cluster state update task was called only once assertEquals(1, restoreListener.count()); } - - private void 
assertBusyPendingTasks(final String taskPrefix, final int expectedCount) throws Exception { - assertBusy(new Runnable() { - @Override - public void run() { - PendingClusterTasksResponse tasks = client().admin().cluster().preparePendingClusterTasks().get(); - int count = 0; - for(PendingClusterTask task : tasks) { - if (task.getSource().toString().startsWith(taskPrefix)) { - count++; - } - } - assertThat(count, equalTo(expectedCount)); - } - }, 1, TimeUnit.MINUTES); - } - - /** - * Cluster state task that blocks waits for the blockOn task to show up and then blocks execution not letting - * any cluster state update task to be performed unless they have priority higher then passThroughPriority. - * - * This class is useful to testing of cluster state update task batching for lower priority tasks. - */ - public class BlockingClusterStateListener implements ClusterStateListener { - - private final Predicate blockOn; - private final Predicate countOn; - private final ClusterService clusterService; - private final CountDownLatch latch; - private final Priority passThroughPriority; - private int count; - private boolean timedOut; - private final TimeValue timeout; - private long stopWaitingAt = -1; - - public BlockingClusterStateListener(ClusterService clusterService, String blockOn, String countOn, Priority passThroughPriority) { - this(clusterService, blockOn, countOn, passThroughPriority, TimeValue.timeValueMinutes(1)); - } - - public BlockingClusterStateListener(ClusterService clusterService, final String blockOn, final String countOn, Priority passThroughPriority, TimeValue timeout) { - this.clusterService = clusterService; - this.blockOn = new Predicate() { - @Override - public boolean apply(ClusterChangedEvent clusterChangedEvent) { - return clusterChangedEvent.source().startsWith(blockOn); - } - }; - this.countOn = new Predicate() { - @Override - public boolean apply(ClusterChangedEvent clusterChangedEvent) { - return 
clusterChangedEvent.source().startsWith(countOn); - } - }; - this.latch = new CountDownLatch(1); - this.passThroughPriority = passThroughPriority; - this.timeout = timeout; - - } - - public void unblock() { - latch.countDown(); - } - - @Override - public void clusterChanged(ClusterChangedEvent event) { - if (blockOn.apply(event)) { - logger.info("blocking cluster state tasks on [{}]", event.source()); - assert stopWaitingAt < 0; // Make sure we are the first time here - stopWaitingAt = System.currentTimeMillis() + timeout.getMillis(); - addBlock(); - } - if (countOn.apply(event)) { - count++; - } - } - - private void addBlock() { - // We should block after this task - add blocking cluster state update task - clusterService.submitStateUpdateTask("test_block", passThroughPriority, new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - while(System.currentTimeMillis() < stopWaitingAt) { - for (PendingClusterTask task : clusterService.pendingTasks()) { - if (task.getSource().string().equals("test_block") == false && passThroughPriority.sameOrAfter(task.getPriority())) { - // There are other higher priority tasks in the queue and let them pass through and then set the block again - logger.info("passing through cluster state task {}", task.getSource()); - addBlock(); - return currentState; - } - } - try { - logger.info("wating...."); - if (latch.await(Math.min(100, timeout.millis()), TimeUnit.MILLISECONDS)){ - // Done waiting - unblock - logger.info("unblocked"); - return currentState; - } - logger.info("done wating...."); - } catch (InterruptedException ex) { - Thread.currentThread().interrupt(); - } - } - timedOut = true; - return currentState; - } - - @Override - public void onFailure(String source, Throwable t) { - logger.warn("failed to execute [{}]", t, source); - } - }); - - } - - public int count() { - return count; - } - - public boolean timedOut() { - return timedOut; - } - } } From 
90cbf80fc4e0864d83f3d8ebe9ff14b0f0cecde0 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Wed, 3 Jun 2015 10:05:46 -1000 Subject: [PATCH 11/16] Tests: remove race condition in the masterShutdownDuringSnapshotTest --- .../DedicatedClusterSnapshotRestoreTests.java | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java b/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java index 9716d23a04f..c1c59dda13b 100644 --- a/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java +++ b/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreTests.java @@ -847,21 +847,22 @@ public class DedicatedClusterSnapshotRestoreTests extends AbstractSnapshotTests logger.info("--> unblocking snapshot execution"); snapshotListener.unblock(); - logger.info("--> wait until the snapshot is done"); - } finally { clusterService.remove(snapshotListener); } + logger.info("--> wait until the snapshot is done"); + assertBusy(new Runnable() { @Override public void run() { - SnapshotsStatusResponse snapshotsStatusResponse = client().admin().cluster().prepareSnapshotStatus("test-repo").setSnapshots("test-snap").get(); - ImmutableList snapshotStatuses = snapshotsStatusResponse.getSnapshots(); - assertEquals(1, snapshotStatuses.size()); - assertTrue(snapshotStatuses.get(0).getState().completed()); + GetSnapshotsResponse snapshotsStatusResponse = client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get(); + SnapshotInfo snapshotInfo = snapshotsStatusResponse.getSnapshots().get(0); + assertTrue(snapshotInfo.state().completed()); } - }); + }, 1, TimeUnit.MINUTES); + + logger.info("--> verify that snapshot was succesful"); GetSnapshotsResponse snapshotsStatusResponse = client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").get(); SnapshotInfo snapshotInfo = 
snapshotsStatusResponse.getSnapshots().get(0); From 26d71fe00e8b9f5986a277ab5b6d2bb470a13766 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 3 Jun 2015 18:47:44 +0200 Subject: [PATCH 12/16] Reduce shard inactivity timeout to 5m To better distribute the memory allocated to indexing, the IndexingMemoryController periodically checks the different shards for their last indexing activity. If no activity has happened for a while, the controller marks the shards as inactive and allocates their memory buffer budget (except for a small minimal budget) to other active shards. The recently added synced flush feature (#11179, #11336) uses this inactivity trigger to attempt adding a sync id marker (which will speed up future recoveries). We wait for 30m before declaring a shard inactive. However, these days the operation just requires a refresh and is light. We can be stricter (5m) to increase the chance that a synced flush will be triggered. Closes #11479 --- docs/reference/indices/flush.asciidoc | 4 ++-- .../indices/memory/IndexingMemoryController.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc index 118a68b7128..3ee2601bb38 100644 --- a/docs/reference/indices/flush.asciidoc +++ b/docs/reference/indices/flush.asciidoc @@ -50,7 +50,7 @@ POST /_flush === Synced Flush Elasticsearch tracks the indexing activity of each shard. Shards that have not -received any indexing operations for 30 minutes are automatically marked as inactive. This presents +received any indexing operations for 5 minutes are automatically marked as inactive. This presents an opportunity for Elasticsearch to reduce shard resources and also perform a special kind of flush, called `synced flush`. A synced flush performs a normal flush, then adds a generated unique marker (sync_id) to all shards. 
@@ -117,7 +117,7 @@ which returns something similar to: === Synced Flush API The Synced Flush API allows an administrator to initiate a synced flush manually. This can be particularly useful for -a planned (rolling) cluster restart where you can stop indexing and don't want to wait the default 30 minutes for +a planned (rolling) cluster restart where you can stop indexing and don't want to wait the default 5 minutes for idle indices to be sync-flushed automatically. While handy, there are a couple of caveats for this API: diff --git a/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java b/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java index b70a21c9df9..430b7634672 100644 --- a/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java +++ b/src/main/java/org/elasticsearch/indices/memory/IndexingMemoryController.java @@ -115,7 +115,7 @@ public class IndexingMemoryController extends AbstractLifecycleComponent Date: Thu, 4 Jun 2015 01:59:22 +0200 Subject: [PATCH 13/16] Docs: Reorganised the Query DSL docs into families and explaining query vs filter context --- docs/reference/query-dsl.asciidoc | 62 ++++++++----- docs/reference/query-dsl/and-query.asciidoc | 2 +- docs/reference/query-dsl/bool-query.asciidoc | 2 +- .../query-dsl/boosting-query.asciidoc | 2 +- .../query-dsl/common-terms-query.asciidoc | 8 +- .../query-dsl/compound-queries.asciidoc | 69 ++++++++++++++ .../query-dsl/constant-score-query.asciidoc | 2 +- .../query-dsl/dis-max-query.asciidoc | 2 +- .../reference/query-dsl/exists-query.asciidoc | 4 +- .../query-dsl/filtered-query.asciidoc | 8 +- .../query-dsl/full-text-queries.asciidoc | 44 +++++++++ .../query-dsl/function-score-query.asciidoc | 38 ++++---- docs/reference/query-dsl/fuzzy-query.asciidoc | 8 +- .../query-dsl/geo-bounding-box-query.asciidoc | 20 ++-- .../query-dsl/geo-distance-query.asciidoc | 18 ++-- .../geo-distance-range-query.asciidoc | 2 +- 
.../query-dsl/geo-polygon-query.asciidoc | 12 +-- docs/reference/query-dsl/geo-queries.asciidoc | 50 ++++++++++ .../query-dsl/geo-shape-query.asciidoc | 10 +- .../query-dsl/geohash-cell-query.asciidoc | 2 +- .../query-dsl/has-child-query.asciidoc | 6 +- .../query-dsl/has-parent-query.asciidoc | 4 +- docs/reference/query-dsl/ids-query.asciidoc | 2 +- docs/reference/query-dsl/index.asciidoc | 2 + .../query-dsl/indices-query.asciidoc | 6 +- .../query-dsl/joining-queries.asciidoc | 32 +++++++ docs/reference/query-dsl/limit-query.asciidoc | 2 +- .../query-dsl/match-all-query.asciidoc | 13 +-- docs/reference/query-dsl/match-query.asciidoc | 15 +-- .../query-dsl/missing-query.asciidoc | 6 +- docs/reference/query-dsl/mlt-query.asciidoc | 12 +-- .../query-dsl/multi-match-query.asciidoc | 22 ++--- .../reference/query-dsl/nested-query.asciidoc | 2 +- docs/reference/query-dsl/not-query.asciidoc | 2 +- docs/reference/query-dsl/or-query.asciidoc | 2 +- .../reference/query-dsl/prefix-query.asciidoc | 2 +- .../query-dsl/query-string-query.asciidoc | 6 +- .../query-dsl/query-string-syntax.asciidoc | 36 +++---- .../query-dsl/query_filter_context.asciidoc | 77 +++++++++++++++ docs/reference/query-dsl/range-query.asciidoc | 4 +- .../reference/query-dsl/regexp-query.asciidoc | 2 +- .../query-dsl/regexp-syntax.asciidoc | 10 +- .../reference/query-dsl/script-query.asciidoc | 4 +- .../simple-query-string-query.asciidoc | 10 +- .../query-dsl/span-containing-query.asciidoc | 2 +- .../query-dsl/span-first-query.asciidoc | 2 +- .../query-dsl/span-multi-term-query.asciidoc | 2 +- .../query-dsl/span-near-query.asciidoc | 2 +- .../query-dsl/span-not-query.asciidoc | 2 +- .../query-dsl/span-or-query.asciidoc | 2 +- .../reference/query-dsl/span-queries.asciidoc | 65 +++++++++++++ .../query-dsl/span-term-query.asciidoc | 2 +- .../query-dsl/span-within-query.asciidoc | 2 +- .../query-dsl/special-queries.asciidoc | 29 ++++++ .../query-dsl/template-query.asciidoc | 4 +- 
.../query-dsl/term-level-queries.asciidoc | 93 +++++++++++++++++++ docs/reference/query-dsl/term-query.asciidoc | 2 +- docs/reference/query-dsl/terms-query.asciidoc | 17 ++-- docs/reference/query-dsl/type-query.asciidoc | 2 +- .../query-dsl/wildcard-query.asciidoc | 2 +- 60 files changed, 668 insertions(+), 206 deletions(-) create mode 100644 docs/reference/query-dsl/compound-queries.asciidoc create mode 100644 docs/reference/query-dsl/full-text-queries.asciidoc create mode 100644 docs/reference/query-dsl/geo-queries.asciidoc create mode 100644 docs/reference/query-dsl/joining-queries.asciidoc create mode 100644 docs/reference/query-dsl/query_filter_context.asciidoc create mode 100644 docs/reference/query-dsl/span-queries.asciidoc create mode 100644 docs/reference/query-dsl/special-queries.asciidoc create mode 100644 docs/reference/query-dsl/term-level-queries.asciidoc diff --git a/docs/reference/query-dsl.asciidoc b/docs/reference/query-dsl.asciidoc index 3f2869e5981..b4a82afdc28 100644 --- a/docs/reference/query-dsl.asciidoc +++ b/docs/reference/query-dsl.asciidoc @@ -3,32 +3,48 @@ [partintro] -- -*elasticsearch* provides a full Query DSL based on JSON to define -queries. In general, there are basic queries such as -<> or -<>. There are -also compound queries like the -<> query. -While queries have scoring capabilities, in some contexts they will -only be used to filter the result set, such as in the -<> or -<> -queries. +Elasticsearch provides a full Query DSL based on JSON to define queries. +Think of the Query DSL as an AST of queries, consisting of two types of +clauses: -Think of the Query DSL as an AST of queries. -Some queries can be used by themselves like the -<> query but other queries can contain -queries (like the <> query), and each -of these composite queries can contain *any* query of the list of -queries, resulting in the ability to build quite -complex (and interesting) queries. +Leaf query clauses:: -Queries can be used in different APIs. 
For example, -within a <>, or -as an <>. -This section explains the queries that can form the AST one can use. +Leaf query clauses look for a particular value in a particular field, such as the +<>, <> or +<> queries. These queries can be used +by themselves. +Compound query clauses:: + +Compound query clauses wrap other leaf *or* compound queries and are used to combine +multiple queries in a logical fashion (such as the +<> or <> query), +or to alter their behaviour (such as the <> or +<> query). + +Query clauses behave differently depending on whether they are used in +<>. -- -include::query-dsl/index.asciidoc[] +include::query-dsl/query_filter_context.asciidoc[] + +include::query-dsl/match-all-query.asciidoc[] + +include::query-dsl/full-text-queries.asciidoc[] + +include::query-dsl/term-level-queries.asciidoc[] + +include::query-dsl/compound-queries.asciidoc[] + +include::query-dsl/joining-queries.asciidoc[] + +include::query-dsl/geo-queries.asciidoc[] + +include::query-dsl/special-queries.asciidoc[] + +include::query-dsl/span-queries.asciidoc[] + +include::query-dsl/minimum-should-match.asciidoc[] + +include::query-dsl/multi-term-rewrite.asciidoc[] diff --git a/docs/reference/query-dsl/and-query.asciidoc b/docs/reference/query-dsl/and-query.asciidoc index 15a52e57b16..864feac4cf2 100644 --- a/docs/reference/query-dsl/and-query.asciidoc +++ b/docs/reference/query-dsl/and-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-and-query]] -== And Query +=== And Query deprecated[2.0.0, Use the `bool` query instead] diff --git a/docs/reference/query-dsl/bool-query.asciidoc b/docs/reference/query-dsl/bool-query.asciidoc index ac6e9f62250..9f0c3246c07 100644 --- a/docs/reference/query-dsl/bool-query.asciidoc +++ b/docs/reference/query-dsl/bool-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-bool-query]] -== Bool Query +=== Bool Query A query that matches documents matching boolean combinations of other queries. The bool query maps to Lucene `BooleanQuery`. 
It is built using diff --git a/docs/reference/query-dsl/boosting-query.asciidoc b/docs/reference/query-dsl/boosting-query.asciidoc index 752fc2b9f64..969b3bbedfe 100644 --- a/docs/reference/query-dsl/boosting-query.asciidoc +++ b/docs/reference/query-dsl/boosting-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-boosting-query]] -== Boosting Query +=== Boosting Query The `boosting` query can be used to effectively demote results that match a given query. Unlike the "NOT" clause in bool query, this still diff --git a/docs/reference/query-dsl/common-terms-query.asciidoc b/docs/reference/query-dsl/common-terms-query.asciidoc index 1c5fa7cc1f6..b9aee04e401 100644 --- a/docs/reference/query-dsl/common-terms-query.asciidoc +++ b/docs/reference/query-dsl/common-terms-query.asciidoc @@ -1,12 +1,12 @@ [[query-dsl-common-terms-query]] -== Common Terms Query +=== Common Terms Query The `common` terms query is a modern alternative to stopwords which improves the precision and recall of search results (by taking stopwords into account), without sacrificing performance. [float] -=== The problem +==== The problem Every term in a query has a cost. A search for `"The brown fox"` requires three term queries, one for each of `"the"`, `"brown"` and @@ -25,7 +25,7 @@ and `"not happy"`) and we lose recall (eg text like `"The The"` or `"To be or not to be"` would simply not exist in the index). [float] -=== The solution +==== The solution The `common` terms query divides the query terms into two groups: more important (ie _low frequency_ terms) and less important (ie _high @@ -63,7 +63,7 @@ site, common terms like `"clip"` or `"video"` will automatically behave as stopwords without the need to maintain a manual list. [float] -=== Examples +==== Examples In this example, words that have a document frequency greater than 0.1% (eg `"this"` and `"is"`) will be treated as _common terms_. 
diff --git a/docs/reference/query-dsl/compound-queries.asciidoc b/docs/reference/query-dsl/compound-queries.asciidoc new file mode 100644 index 00000000000..adce4bebd59 --- /dev/null +++ b/docs/reference/query-dsl/compound-queries.asciidoc @@ -0,0 +1,69 @@ +[[compound-queries]] +== Compound queries + +Compound queries wrap other compound or leaf queries, either to combine their +results and scores, to change their behaviour, or to switch from query to +filter context. + +The queries in this group are: + +<>:: + +A query which wraps another query, but executes it in filter context. All +matching documents are given the same ``constant'' `_score`. + +<>:: + +The default query for combining multiple leaf or compound query clauses, as +`must`, `should`, `must_not`, or `filter` clauses. The `must` and `should` +clauses have their scores combined -- the more matching clauses, the better -- +while the `must_not` and `filter` clauses are executed in filter context. + +<>:: + +A query which accepts multiple queries, and returns any documents which match +any of the query clauses. While the `bool` query combines the scores from all +matching queries, the `dis_max` query uses the score of the single best- +matching query clause. + +<>:: + +Modify the scores returned by the main query with functions to take into +account factors like popularity, recency, distance, or custom algorithms +implemented with scripting. + +<>:: + +Return documents which match a `positive` query, but reduce the score of +documents which also match a `negative` query. + +<>:: + +Execute one query for the specified indices, and another for other indices. + +<>, <>, <>:: + +Synonyms for the `bool` query. + +<>:: + +Combine a query clause in query context with another in filter context. deprecated[2.0.0,Use the `bool` query instead] + +<>:: + +Limits the number of documents examined per shard. 
deprecated[1.6.0] + + +include::constant-score-query.asciidoc[] +include::bool-query.asciidoc[] +include::dis-max-query.asciidoc[] +include::function-score-query.asciidoc[] +include::boosting-query.asciidoc[] +include::indices-query.asciidoc[] +include::and-query.asciidoc[] +include::not-query.asciidoc[] +include::or-query.asciidoc[] +include::filtered-query.asciidoc[] +include::limit-query.asciidoc[] + + diff --git a/docs/reference/query-dsl/constant-score-query.asciidoc b/docs/reference/query-dsl/constant-score-query.asciidoc index e31b7b801a5..8e76ac13ff5 100644 --- a/docs/reference/query-dsl/constant-score-query.asciidoc +++ b/docs/reference/query-dsl/constant-score-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-constant-score-query]] -== Constant Score Query +=== Constant Score Query A query that wraps another query and simply returns a constant score equal to the query boost for every document in the diff --git a/docs/reference/query-dsl/dis-max-query.asciidoc b/docs/reference/query-dsl/dis-max-query.asciidoc index ae8445ca5dd..2938c8db8ea 100644 --- a/docs/reference/query-dsl/dis-max-query.asciidoc +++ b/docs/reference/query-dsl/dis-max-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-dis-max-query]] -== Dis Max Query +=== Dis Max Query A query that generates the union of documents produced by its subqueries, and that scores each document with the maximum score for diff --git a/docs/reference/query-dsl/exists-query.asciidoc b/docs/reference/query-dsl/exists-query.asciidoc index 02460da7b65..d25ebdecd89 100644 --- a/docs/reference/query-dsl/exists-query.asciidoc +++ b/docs/reference/query-dsl/exists-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-exists-query]] -== Exists Query +=== Exists Query Returns documents that have at least one non-`null` value in the original field: @@ -42,7 +42,7 @@ These documents would *not* match the above query: <3> The `user` field is missing completely. 
[float] -==== `null_value` mapping +===== `null_value` mapping If the field mapping includes the `null_value` setting (see <>) then explicit `null` values are replaced with the specified `null_value`. For diff --git a/docs/reference/query-dsl/filtered-query.asciidoc b/docs/reference/query-dsl/filtered-query.asciidoc index 094d3edefc1..d12d32a974d 100644 --- a/docs/reference/query-dsl/filtered-query.asciidoc +++ b/docs/reference/query-dsl/filtered-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-filtered-query]] -== Filtered Query +=== Filtered Query deprecated[2.0.0, Use the `bool` query instead with a `must` clause for the query and a `filter` clause for the filter] @@ -47,7 +47,7 @@ curl -XGET localhost:9200/_search -d ' <1> The `filtered` query is passed as the value of the `query` parameter in the search request. -=== Filtering without a query +==== Filtering without a query If a `query` is not specified, it defaults to the <>. This means that the @@ -71,7 +71,7 @@ curl -XGET localhost:9200/_search -d ' <1> No `query` has been specified, so this request applies just the filter, returning all documents created since yesterday. -==== Multiple filters +===== Multiple filters Multiple filters can be applied by wrapping them in a <>, for example: @@ -95,7 +95,7 @@ Multiple filters can be applied by wrapping them in a } -------------------------------------------------- -==== Filter strategy +===== Filter strategy You can control how the filter and query are executed with the `strategy` parameter: diff --git a/docs/reference/query-dsl/full-text-queries.asciidoc b/docs/reference/query-dsl/full-text-queries.asciidoc new file mode 100644 index 00000000000..9c61f9f5681 --- /dev/null +++ b/docs/reference/query-dsl/full-text-queries.asciidoc @@ -0,0 +1,44 @@ +[[full-text-queries]] +== Full text queries + +The high-level full text queries are usually used for running full text +queries on full text fields like the body of an email. 
They understand how the +field being queried is <> and will apply each field's +`analyzer` (or `search_analyzer`) to the query string before executing. + +The queries in this group are: + +<>:: + + The standard query for performing full text queries, including fuzzy matching + and phrase or proximity queries. + +<>:: + + The multi-field version of the `match` query. + +<>:: + + A more specialized query which gives more preference to uncommon words. + +<>:: + + Supports the compact Lucene <>, + allowing you to specify AND|OR|NOT conditions and multi-field search + within a single query string. For expert users only. + +<>:: + + A simpler, more robust version of the `query_string` syntax suitable + for exposing directly to users. + +include::match-query.asciidoc[] + +include::multi-match-query.asciidoc[] + +include::common-terms-query.asciidoc[] + +include::query-string-query.asciidoc[] + +include::simple-query-string-query.asciidoc[] + diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index a5618a23c48..5d1ee98f83a 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -1,15 +1,13 @@ [[query-dsl-function-score-query]] -== Function Score Query +=== Function Score Query The `function_score` allows you to modify the score of documents that are retrieved by a query. This can be useful if, for example, a score function is computationally expensive and it is sufficient to compute the score on a filtered set of documents. -=== Using function score - To use `function_score`, the user has to define a query and one or -several functions, that compute a new score for each document returned +more functions, that compute a new score for each document returned by the query. `function_score` can be used with only one function like this: @@ -89,13 +87,13 @@ query. 
The parameter `boost_mode` defines how: `min`:: min of query score and function score By default, modifying the score does not change which documents match. To exclude -documents that do not meet a certain score threshold the `min_score` parameter can be set to the desired score threshold. +documents that do not meet a certain score threshold the `min_score` parameter can be set to the desired score threshold. ==== Score functions The `function_score` query provides several types of score functions. -===== Script score +====== Script score The `script_score` function allows you to wrap another query and customize the scoring of it optionally with a computation derived from other numeric @@ -135,7 +133,7 @@ Note that unlike the `custom_score` query, the score of the query is multiplied with the result of the script scoring. If you wish to inhibit this, set `"boost_mode": "replace"` -===== Weight +====== Weight The `weight` score allows you to multiply the score by the provided `weight`. This can sometimes be desired since boost value set on @@ -147,7 +145,7 @@ not. "weight" : number -------------------------------------------------- -===== Random +====== Random The `random_score` generates scores using a hash of the `_uid` field, with a `seed` for variation. If `seed` is not specified, the current @@ -163,7 +161,7 @@ be a memory intensive operation since the values are unique. } -------------------------------------------------- -===== Field Value factor +====== Field Value factor The `field_value_factor` function allows you to use a field from a document to influence the score. It's similar to using the `script_score` function, however, @@ -207,7 +205,7 @@ is an illegal operation, and an exception will be thrown. Be sure to limit the values of the field with a range filter to avoid this, or use `log1p` and `ln1p`. 
-===== Decay functions +====== Decay functions Decay functions score a document with a function that decays depending on the distance of a numeric field value of the document from a user @@ -254,13 +252,13 @@ The `offset` and `decay` parameters are optional. [horizontal] `origin`:: - The point of origin used for calculating distance. Must be given as a - number for numeric field, date for date fields and geo point for geo fields. + The point of origin used for calculating distance. Must be given as a + number for numeric field, date for date fields and geo point for geo fields. Required for geo and numeric field. For date fields the default is `now`. Date math (for example `now-1h`) is supported for origin. `scale`:: - Required for all types. Defines the distance from origin at which the computed + Required for all types. Defines the distance from origin at which the computed score will equal `decay` parameter. For geo fields: Can be defined as number+unit (1km, 12m,...). Default unit is meters. For date fields: Can to be defined as a number+unit ("1h", "10d",...). Default unit is milliseconds. For numeric field: Any number. @@ -334,7 +332,7 @@ For single functions the three decay functions together with their parameters ca image:images/decay_2d.png[width=600] -===== Multiple values: +====== Multiple values: If a field used for computing the decay contains multiple values, per default the value closest to the origin is chosen for determining the distance. This can be changed by setting `multi_value_mode`. @@ -360,7 +358,7 @@ Example: -==== Detailed example +===== Detailed example Suppose you are searching for a hotel in a certain town. Your budget is limited. Also, you would like the hotel to be close to the town center, @@ -450,7 +448,7 @@ curl 'localhost:9200/hotels/_search/' -d '{ Next, we show how the computed score looks like for each of the three possible decay functions. 
-===== Normal decay, keyword `gauss` +====== Normal decay, keyword `gauss` When choosing `gauss` as the decay function in the above example, the contour and surface plot of the multiplier looks like this: @@ -471,7 +469,7 @@ of 0.56. "BnB Bellevue" and "Backback Nap" are both pretty close to the defined location but "BnB Bellevue" is cheaper, so it gets a multiplier of 0.86 whereas "Backpack Nap" gets a value of 0.66. -===== Exponential decay, keyword `exp` +====== Exponential decay, keyword `exp` When choosing `exp` as the decay function in the above example, the contour and surface plot of the multiplier looks like this: @@ -480,7 +478,7 @@ image::https://f.cloud.github.com/assets/4320215/768161/082975c0-e899-11e2-86f7- image::https://f.cloud.github.com/assets/4320215/768162/0b606884-e899-11e2-907b-aefc77eefef6.png[width="700px"] -===== Linear' decay, keyword `linear` +====== Linear' decay, keyword `linear` When choosing `linear` as the decay function in the above example, the contour and surface plot of the multiplier looks like this: @@ -489,12 +487,12 @@ image::https://f.cloud.github.com/assets/4320215/768164/1775b0ca-e899-11e2-9f4a- image::https://f.cloud.github.com/assets/4320215/768165/19d8b1aa-e899-11e2-91bc-6b0553e8d722.png[width="700px"] -==== Supported fields for decay functions +===== Supported fields for decay functions Only single valued numeric fields, including time and geo locations, are supported. -==== What if a field is missing? +===== What if a field is missing? If the numeric field is missing in the document, the function will return 1. 
diff --git a/docs/reference/query-dsl/fuzzy-query.asciidoc b/docs/reference/query-dsl/fuzzy-query.asciidoc index 0da33928855..b8af3aee066 100644 --- a/docs/reference/query-dsl/fuzzy-query.asciidoc +++ b/docs/reference/query-dsl/fuzzy-query.asciidoc @@ -1,10 +1,10 @@ [[query-dsl-fuzzy-query]] -== Fuzzy Query +=== Fuzzy Query The fuzzy query uses similarity based on Levenshtein edit distance for `string` fields, and a `+/-` margin on numeric and date fields. -=== String fields +==== String fields The `fuzzy` query generates all possible matching terms that are within the maximum edit distance specified in `fuzziness` and then checks the term @@ -38,7 +38,7 @@ Or with more advanced settings: -------------------------------------------------- [float] -==== Parameters +===== Parameters [horizontal] `fuzziness`:: @@ -62,7 +62,7 @@ are both set to `0`. This could cause every term in the index to be examined! [float] -=== Numeric and date fields +==== Numeric and date fields Performs a <> ``around'' the value using the `fuzziness` value as a `+/-` range, where: diff --git a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc index deb0727d61f..e2d404b69e1 100644 --- a/docs/reference/query-dsl/geo-bounding-box-query.asciidoc +++ b/docs/reference/query-dsl/geo-bounding-box-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-geo-bounding-box-query]] -== Geo Bounding Box Query +=== Geo Bounding Box Query A query allowing to filter hits based on a point location using a bounding box. 
Assuming the following indexed document: @@ -45,13 +45,13 @@ Then the following simple query can be executed with a -------------------------------------------------- [float] -=== Accepted Formats +==== Accepted Formats In much the same way the geo_point type can accept different representation of the geo point, the filter can accept it as well: [float] -==== Lat Lon As Properties +===== Lat Lon As Properties [source,js] -------------------------------------------------- @@ -79,7 +79,7 @@ representation of the geo point, the filter can accept it as well: -------------------------------------------------- [float] -==== Lat Lon As Array +===== Lat Lon As Array Format in `[lon, lat]`, note, the order of lon/lat here in order to conform with http://geojson.org/[GeoJSON]. @@ -104,7 +104,7 @@ conform with http://geojson.org/[GeoJSON]. -------------------------------------------------- [float] -==== Lat Lon As String +===== Lat Lon As String Format in `lat,lon`. @@ -128,7 +128,7 @@ Format in `lat,lon`. -------------------------------------------------- [float] -==== Geohash +===== Geohash [source,js] -------------------------------------------------- @@ -150,7 +150,7 @@ Format in `lat,lon`. -------------------------------------------------- [float] -=== Vertices +==== Vertices The vertices of the bounding box can either be set by `top_left` and `bottom_right` or by `top_right` and `bottom_left` parameters. More @@ -182,20 +182,20 @@ values separately. [float] -=== geo_point Type +==== geo_point Type The filter *requires* the `geo_point` type to be set on the relevant field. [float] -=== Multi Location Per Document +==== Multi Location Per Document The filter can work with multiple locations / points per document. 
Once a single location / point matches the filter, the document will be included in the filter [float] -=== Type +==== Type The type of the bounding box execution by default is set to `memory`, which means in memory checks if the doc falls within the bounding box diff --git a/docs/reference/query-dsl/geo-distance-query.asciidoc b/docs/reference/query-dsl/geo-distance-query.asciidoc index 2fe109afd76..a0f0d3163c0 100644 --- a/docs/reference/query-dsl/geo-distance-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-geo-distance-query]] -== Geo Distance Query +=== Geo Distance Query Filters documents that include only hits that exists within a specific distance from a geo point. Assuming the following indexed json: @@ -40,13 +40,13 @@ filter: -------------------------------------------------- [float] -=== Accepted Formats +==== Accepted Formats In much the same way the `geo_point` type can accept different representation of the geo point, the filter can accept it as well: [float] -==== Lat Lon As Properties +===== Lat Lon As Properties [source,js] -------------------------------------------------- @@ -69,7 +69,7 @@ representation of the geo point, the filter can accept it as well: -------------------------------------------------- [float] -==== Lat Lon As Array +===== Lat Lon As Array Format in `[lon, lat]`, note, the order of lon/lat here in order to conform with http://geojson.org/[GeoJSON]. @@ -92,7 +92,7 @@ conform with http://geojson.org/[GeoJSON]. -------------------------------------------------- [float] -==== Lat Lon As String +===== Lat Lon As String Format in `lat,lon`. @@ -114,7 +114,7 @@ Format in `lat,lon`. -------------------------------------------------- [float] -==== Geohash +===== Geohash [source,js] -------------------------------------------------- @@ -134,7 +134,7 @@ Format in `lat,lon`. 
-------------------------------------------------- [float] -=== Options +==== Options The following are options allowed on the filter: @@ -160,13 +160,13 @@ The following are options allowed on the filter: [float] -=== geo_point Type +==== geo_point Type The filter *requires* the `geo_point` type to be set on the relevant field. [float] -=== Multi Location Per Document +==== Multi Location Per Document The `geo_distance` filter can work with multiple locations / points per document. Once a single location / point matches the filter, the diff --git a/docs/reference/query-dsl/geo-distance-range-query.asciidoc b/docs/reference/query-dsl/geo-distance-range-query.asciidoc index 8109f3c9452..855159bd3a2 100644 --- a/docs/reference/query-dsl/geo-distance-range-query.asciidoc +++ b/docs/reference/query-dsl/geo-distance-range-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-geo-distance-range-query]] -== Geo Distance Range Query +=== Geo Distance Range Query Filters documents that exists within a range from a specific point: diff --git a/docs/reference/query-dsl/geo-polygon-query.asciidoc b/docs/reference/query-dsl/geo-polygon-query.asciidoc index ea22643ebe6..b9b624bda4c 100644 --- a/docs/reference/query-dsl/geo-polygon-query.asciidoc +++ b/docs/reference/query-dsl/geo-polygon-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-geo-polygon-query]] -== Geo Polygon Query +=== Geo Polygon Query A query allowing to include hits that only fall within a polygon of points. Here is an example: @@ -27,10 +27,10 @@ points. Here is an example: -------------------------------------------------- [float] -=== Allowed Formats +==== Allowed Formats [float] -==== Lat Long as Array +===== Lat Long as Array Format in `[lon, lat]`, note, the order of lon/lat here in order to conform with http://geojson.org/[GeoJSON]. @@ -58,7 +58,7 @@ conform with http://geojson.org/[GeoJSON]. -------------------------------------------------- [float] -==== Lat Lon as String +===== Lat Lon as String Format in `lat,lon`. 
@@ -85,7 +85,7 @@ Format in `lat,lon`. -------------------------------------------------- [float] -==== Geohash +===== Geohash [source,js] -------------------------------------------------- @@ -110,7 +110,7 @@ Format in `lat,lon`. -------------------------------------------------- [float] -=== geo_point Type +==== geo_point Type The filter *requires* the <> type to be diff --git a/docs/reference/query-dsl/geo-queries.asciidoc b/docs/reference/query-dsl/geo-queries.asciidoc new file mode 100644 index 00000000000..01f42831e00 --- /dev/null +++ b/docs/reference/query-dsl/geo-queries.asciidoc @@ -0,0 +1,50 @@ +[[geo-queries]] +== Geo queries + +Elasticsearch supports two types of geo data: +<> fields which support lat/lon pairs, and +<> fields, which support points, +lines, circles, polygons, multi-polygons etc. + +The queries in this group are: + +<> query:: + + Find documents with geo-shapes which either intersect, are contained by, or + do not intersect with the specified geo-shape. + +<> query:: + + Finds documents with geo-points that fall into the specified rectangle. + +<> query:: + + Finds documents with geo-points within the specified distance of a central + point. + +<> query:: + + Like the `geo_point` query, but the range starts at a specified distance + from the central point. + +<> query:: + + Find documents with geo-points within the specified polygon. + +<> query:: + + Find geo-points whose geohash intersects with the geohash of the specified + point.
+ + +include::geo-shape-query.asciidoc[] + +include::geo-bounding-box-query.asciidoc[] + +include::geo-distance-query.asciidoc[] + +include::geo-distance-range-query.asciidoc[] + +include::geo-polygon-query.asciidoc[] + +include::geohash-cell-query.asciidoc[] diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index 0addabc342a..94c085c0902 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -1,17 +1,15 @@ [[query-dsl-geo-shape-query]] -== GeoShape Filter +=== GeoShape Filter Filter documents indexed using the `geo_shape` type. -Requires the <>. +Requires the <>. The `geo_shape` query uses the same grid square representation as the geo_shape mapping to find documents that have a shape that intersects with the query shape. It will also use the same PrefixTree configuration as defined for the field mapping. -[float] ==== Filter Format The Filter supports two ways of defining the Filter shape, either by @@ -19,8 +17,7 @@ providing a whole shape definition, or by referencing the name of a shape pre-indexed in another index. Both formats are defined below with examples. -[float] -===== Provided Shape Definition +====== Provided Shape Definition Similar to the `geo_shape` type, the `geo_shape` Filter uses http://www.geojson.org[GeoJSON] to represent shapes. 
@@ -64,7 +61,6 @@ The following query will find the point using the Elasticsearch's } -------------------------------------------------- -[float] ===== Pre-Indexed Shape The Filter also supports using a shape which has already been indexed in diff --git a/docs/reference/query-dsl/geohash-cell-query.asciidoc b/docs/reference/query-dsl/geohash-cell-query.asciidoc index b0004b64dee..8b75f3c60b4 100644 --- a/docs/reference/query-dsl/geohash-cell-query.asciidoc +++ b/docs/reference/query-dsl/geohash-cell-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-geohash-cell-query]] -== Geohash Cell Query +=== Geohash Cell Query The `geohash_cell` query provides access to a hierarchy of geohashes. By defining a geohash cell, only <> diff --git a/docs/reference/query-dsl/has-child-query.asciidoc b/docs/reference/query-dsl/has-child-query.asciidoc index 606c6a58459..6f42e6f6df6 100644 --- a/docs/reference/query-dsl/has-child-query.asciidoc +++ b/docs/reference/query-dsl/has-child-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-has-child-query]] -== Has Child Query +=== Has Child Query The `has_child` filter accepts a query and the child type to run against, and results in parent documents that have child docs matching the query. Here is @@ -20,7 +20,7 @@ an example: -------------------------------------------------- [float] -=== Scoring capabilities +==== Scoring capabilities The `has_child` also has scoring support. The supported score types are `min`, `max`, `sum`, `avg` or `none`. 
The default is @@ -46,7 +46,7 @@ inside the `has_child` query: -------------------------------------------------- [float] -=== Min/Max Children +==== Min/Max Children The `has_child` query allows you to specify that a minimum and/or maximum number of children are required to match for the parent doc to be considered diff --git a/docs/reference/query-dsl/has-parent-query.asciidoc b/docs/reference/query-dsl/has-parent-query.asciidoc index cbd4fb9358e..70e9ba524bc 100644 --- a/docs/reference/query-dsl/has-parent-query.asciidoc +++ b/docs/reference/query-dsl/has-parent-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-has-parent-query]] -== Has Parent Query +=== Has Parent Query The `has_parent` query accepts a query and a parent type. The query is executed in the parent document space, which is specified by the parent @@ -22,7 +22,7 @@ in the same manner as the `has_child` query. -------------------------------------------------- [float] -=== Scoring capabilities +==== Scoring capabilities The `has_parent` also has scoring support. The supported score types are `score` or `none`. The default is `none` and diff --git a/docs/reference/query-dsl/ids-query.asciidoc b/docs/reference/query-dsl/ids-query.asciidoc index 8811b8fa767..7d08243a78f 100644 --- a/docs/reference/query-dsl/ids-query.asciidoc +++ b/docs/reference/query-dsl/ids-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-ids-query]] -== Ids Query +=== Ids Query Filters documents that only have the provided ids. Note, this query uses the <> field. 
diff --git a/docs/reference/query-dsl/index.asciidoc b/docs/reference/query-dsl/index.asciidoc index d2fa27c719b..ecb520443bb 100644 --- a/docs/reference/query-dsl/index.asciidoc +++ b/docs/reference/query-dsl/index.asciidoc @@ -1,3 +1,5 @@ +include::query_filter_context.asciidoc[] + include::match-query.asciidoc[] include::multi-match-query.asciidoc[] diff --git a/docs/reference/query-dsl/indices-query.asciidoc b/docs/reference/query-dsl/indices-query.asciidoc index 651d3f22f61..f8e9e58d117 100644 --- a/docs/reference/query-dsl/indices-query.asciidoc +++ b/docs/reference/query-dsl/indices-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-indices-query]] -== Indices Query +=== Indices Query The `indices` query can be used when executed across multiple indices, allowing to have a query that executes only when executed on an index @@ -29,9 +29,9 @@ documents), and `all` (to match all). Defaults to `all`. `query` is mandatory, as well as `indices` (or `index`). [TIP] -=================================================================== +==================================================================== The fields order is important: if the `indices` are provided before `query` or `no_match_query`, the related queries get parsed only against the indices that they are going to be executed on. This is useful to avoid parsing queries when it is not necessary and prevent potential mapping errors. -=================================================================== +==================================================================== diff --git a/docs/reference/query-dsl/joining-queries.asciidoc b/docs/reference/query-dsl/joining-queries.asciidoc new file mode 100644 index 00000000000..a230dedae92 --- /dev/null +++ b/docs/reference/query-dsl/joining-queries.asciidoc @@ -0,0 +1,32 @@ +[[joining-queries]] +== Joining queries + +Performing full SQL-style joins in a distributed system like Elasticsearch is +prohibitively expensive. 
Instead, Elasticsearch offers two forms of join +which are designed to scale horizontally. + +<>:: + +Documents may contain fields of type <>. These +fields are used to index arrays of objects, where each object can be queried +(with the `nested` query) as an independent document. + +<> and <> queries:: + +A <> can exist between two +document types within a single index. The `has_child` query returns parent +documents whose child documents match the specified query, while the +`has_parent` query returns child documents whose parent document matches the +specified query. + +Also see the <> in the `terms` +query, which allows you to build a `terms` query from values contained in +another document. + +include::nested-query.asciidoc[] + +include::has-child-query.asciidoc[] + +include::has-parent-query.asciidoc[] + + diff --git a/docs/reference/query-dsl/limit-query.asciidoc b/docs/reference/query-dsl/limit-query.asciidoc index 52140296c2c..1cfb0b852a9 100644 --- a/docs/reference/query-dsl/limit-query.asciidoc +++ b/docs/reference/query-dsl/limit-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-limit-query]] -== Limit Query +=== Limit Query deprecated[1.6.0, Use <> instead] diff --git a/docs/reference/query-dsl/match-all-query.asciidoc b/docs/reference/query-dsl/match-all-query.asciidoc index 85b0a2ff95e..b7f4251f0cd 100644 --- a/docs/reference/query-dsl/match-all-query.asciidoc +++ b/docs/reference/query-dsl/match-all-query.asciidoc @@ -1,20 +1,17 @@ [[query-dsl-match-all-query]] == Match All Query -A query that matches all documents. Maps to Lucene `MatchAllDocsQuery`. +The most simple query, which matches all documents, giving them all a `_score` +of `1.0`.
[source,js] -------------------------------------------------- -{ - "match_all" : { } -} +{ "match_all": {} } -------------------------------------------------- -Which can also have boost associated with it: +The `_score` can be changed with the `boost` parameter: [source,js] -------------------------------------------------- -{ - "match_all" : { "boost" : 1.2 } -} +{ "match_all": { "boost" : 1.2 }} -------------------------------------------------- diff --git a/docs/reference/query-dsl/match-query.asciidoc b/docs/reference/query-dsl/match-query.asciidoc index 986228ce4eb..e06f2e92a5a 100644 --- a/docs/reference/query-dsl/match-query.asciidoc +++ b/docs/reference/query-dsl/match-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-match-query]] -== Match Query +=== Match Query A family of `match` queries that accept text/numerics/dates, analyzes it, and constructs a query out of it. For example: @@ -16,10 +16,8 @@ it, and constructs a query out of it. For example: Note, `message` is the name of a field, you can substitute the name of any field (including `_all`) instead. -[float] -=== Types of Match Queries +There are three types of `match` query: `boolean`, `phrase`, and `phrase_prefix`: -[float] [[query-dsl-match-query-boolean]] ==== boolean @@ -40,8 +38,7 @@ data-type mismatches, such as trying to query a numeric field with a text query string. Defaults to `false`. [[query-dsl-match-query-fuzziness]] -[float] -===== Fuzziness +====== Fuzziness `fuzziness` allows _fuzzy matching_ based on the type of field being queried. See <> for allowed settings. @@ -69,7 +66,6 @@ change in structure, `message` is the field name): -------------------------------------------------- [[query-dsl-match-query-zero]] -[float] ===== Zero terms query If the analyzer used removes all tokens in a query like a `stop` filter does, the default behavior is to match no documents at all. 
In order to @@ -90,7 +86,6 @@ change that the `zero_terms_query` option can be used, which accepts -------------------------------------------------- [[query-dsl-match-query-cutoff]] -[float] ===== Cutoff frequency The match query supports a `cutoff_frequency` that allows @@ -132,7 +127,6 @@ that when trying it out on test indexes with low document numbers you should follow the advice in {defguide}/relevance-is-broken.html[Relevance is broken]. [[query-dsl-match-query-phrase]] -[float] ==== phrase The `match_phrase` query analyzes the text and creates a `phrase` query @@ -181,9 +175,8 @@ definition, or the default search analyzer, for example: } -------------------------------------------------- -[float] [[query-dsl-match-query-phrase-prefix]] -===== match_phrase_prefix +==== match_phrase_prefix The `match_phrase_prefix` is the same as `match_phrase`, except that it allows for prefix matches on the last term in the text. For example: diff --git a/docs/reference/query-dsl/missing-query.asciidoc b/docs/reference/query-dsl/missing-query.asciidoc index 906823291f9..276722bd4c6 100644 --- a/docs/reference/query-dsl/missing-query.asciidoc +++ b/docs/reference/query-dsl/missing-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-missing-query]] -== Missing Query +=== Missing Query Returns documents that have only `null` values or no value in the original field: @@ -42,7 +42,7 @@ These documents would *not* match the above filter: <3> This field has one non-`null` value. [float] -=== `null_value` mapping +==== `null_value` mapping If the field mapping includes a `null_value` (see <>) then explicit `null` values are replaced with the specified `null_value`. 
For instance, if the `user` field were mapped @@ -75,7 +75,7 @@ no values in the `user` field and thus would match the `missing` filter: -------------------------------------------------- [float] -==== `existence` and `null_value` parameters +===== `existence` and `null_value` parameters When the field being queried has a `null_value` mapping, then the behaviour of the `missing` filter can be altered with the `existence` and `null_value` diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index d0097627e1d..37246db5023 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-mlt-query]] -== More Like This Query +=== More Like This Query The More Like This Query (MLT Query) finds documents that are "like" a given set of documents. In order to do so, MLT selects a set of representative terms @@ -87,7 +87,7 @@ present in the index, the syntax is similar to <> to allow multi-field queries: @@ -17,7 +17,7 @@ to allow multi-field queries: <2> The fields to be queried. [float] -=== `fields` and per-field boosting +==== `fields` and per-field boosting Fields can be specified with wildcards, eg: @@ -47,7 +47,7 @@ Individual fields can be boosted with the caret (`^`) notation: [[multi-match-types]] [float] -=== Types of `multi_match` query: +==== Types of `multi_match` query: The way the `multi_match` query is executed internally depends on the `type` parameter, which can be set to: @@ -70,7 +70,7 @@ parameter, which can be set to: combines the `_score` from each field. See <>. [[type-best-fields]] -=== `best_fields` +==== `best_fields` The `best_fields` type is most useful when you are searching for multiple words best found in the same field. For instance ``brown fox'' in a single @@ -121,7 +121,7 @@ and `cutoff_frequency`, as explained in <>. 
[IMPORTANT] [[operator-min]] .`operator` and `minimum_should_match` -================================================== +=================================================== The `best_fields` and `most_fields` types are _field-centric_ -- they generate a `match` query *per field*. This means that the `operator` and @@ -153,10 +153,10 @@ to match. See <> for a better solution. -================================================== +=================================================== [[type-most-fields]] -=== `most_fields` +==== `most_fields` The `most_fields` type is most useful when querying multiple fields that contain the same text analyzed in different ways. For instance, the main @@ -203,7 +203,7 @@ and `cutoff_frequency`, as explained in <>, b *see <>*. [[type-phrase]] -=== `phrase` and `phrase_prefix` +==== `phrase` and `phrase_prefix` The `phrase` and `phrase_prefix` types behave just like <>, but they use a `match_phrase` or `match_phrase_prefix` query instead of a @@ -240,7 +240,7 @@ in <>. Type `phrase_prefix` additionally accepts `max_expansions`. [[type-cross-fields]] -=== `cross_fields` +==== `cross_fields` The `cross_fields` type is particularly useful with structured documents where multiple fields *should* match. For instance, when querying the `first_name` @@ -317,7 +317,7 @@ Also, accepts `analyzer`, `boost`, `operator`, `minimum_should_match`, `zero_terms_query` and `cutoff_frequency`, as explained in <>. -==== `cross_field` and analysis +===== `cross_field` and analysis The `cross_field` type can only work in term-centric mode on fields that have the same analyzer. 
Fields with the same analyzer are grouped together as in @@ -411,7 +411,7 @@ which will be executed as: blended("will", fields: [first, first.edge, last.edge, last]) blended("smith", fields: [first, first.edge, last.edge, last]) -==== `tie_breaker` +===== `tie_breaker` By default, each per-term `blended` query will use the best score returned by any field in a group, then these scores are added together to give the final diff --git a/docs/reference/query-dsl/nested-query.asciidoc b/docs/reference/query-dsl/nested-query.asciidoc index 27e6b620f38..6460e027885 100644 --- a/docs/reference/query-dsl/nested-query.asciidoc +++ b/docs/reference/query-dsl/nested-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-nested-query]] -== Nested Query +=== Nested Query Nested query allows to query nested objects / docs (see <>). The diff --git a/docs/reference/query-dsl/not-query.asciidoc b/docs/reference/query-dsl/not-query.asciidoc index 414722aaf2b..a74a0f11734 100644 --- a/docs/reference/query-dsl/not-query.asciidoc +++ b/docs/reference/query-dsl/not-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-not-query]] -== Not Query +=== Not Query A query that filters out matched documents using a query. For example: diff --git a/docs/reference/query-dsl/or-query.asciidoc b/docs/reference/query-dsl/or-query.asciidoc index ad85faabe3e..8a75b625d44 100644 --- a/docs/reference/query-dsl/or-query.asciidoc +++ b/docs/reference/query-dsl/or-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-or-query]] -== Or Query +=== Or Query deprecated[2.0.0, Use the `bool` query instead] diff --git a/docs/reference/query-dsl/prefix-query.asciidoc b/docs/reference/query-dsl/prefix-query.asciidoc index b1a3a1e9611..cf26e850ad8 100644 --- a/docs/reference/query-dsl/prefix-query.asciidoc +++ b/docs/reference/query-dsl/prefix-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-prefix-query]] -== Prefix Query +=== Prefix Query Matches documents that have fields containing terms with a specified prefix (*not analyzed*). 
The prefix query maps to Lucene `PrefixQuery`. diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc index 9df7998c961..0f0763f65c5 100644 --- a/docs/reference/query-dsl/query-string-query.asciidoc +++ b/docs/reference/query-dsl/query-string-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-query-string-query]] -== Query String Query +=== Query String Query A query that uses a query parser in order to parse its content. Here is an example: @@ -89,7 +89,7 @@ rewritten using the parameter. [float] -=== Default Field +==== Default Field When not explicitly specifying the field to search on in the query string syntax, the `index.query.default_field` will be used to derive @@ -99,7 +99,7 @@ So, if `_all` field is disabled, it might make sense to change it to set a different default field. [float] -=== Multi Field +==== Multi Field The `query_string` query can also run against multiple fields. Fields can be provided via the `"fields"` parameter (example below). diff --git a/docs/reference/query-dsl/query-string-syntax.asciidoc b/docs/reference/query-dsl/query-string-syntax.asciidoc index d1f3be8c0db..17198a1991f 100644 --- a/docs/reference/query-dsl/query-string-syntax.asciidoc +++ b/docs/reference/query-dsl/query-string-syntax.asciidoc @@ -1,6 +1,6 @@ [[query-string-syntax]] -=== Query string syntax +==== Query string syntax The query string ``mini-language'' is used by the <> and by the @@ -14,7 +14,7 @@ phrase, in the same order. Operators allow you to customize the search -- the available options are explained below. 
-==== Field names +===== Field names As mentioned in <>, the `default_field` is searched for the search terms, but it is possible to specify other fields in the query syntax: @@ -46,7 +46,7 @@ search terms, but it is possible to specify other fields in the query syntax: _exists_:title -==== Wildcards +===== Wildcards Wildcard searches can be run on individual terms, using `?` to replace a single character, and `*` to replace zero or more characters: @@ -58,12 +58,12 @@ perform very badly -- just think how many terms need to be queried to match the query string `"a* b* c*"`. [WARNING] -====== +======= Allowing a wildcard at the beginning of a word (eg `"*ing"`) is particularly heavy, because all terms in the index need to be examined, just in case they match. Leading wildcards can be disabled by setting `allow_leading_wildcard` to `false`. -====== +======= Wildcarded terms are not analyzed by default -- they are lowercased (`lowercase_expanded_terms` defaults to `true`) but no further analysis @@ -72,7 +72,7 @@ is missing some of its letters. However, by setting `analyze_wildcard` to `true`, an attempt will be made to analyze wildcarded words before searching the term list for matching terms. -==== Regular expressions +===== Regular expressions Regular expression patterns can be embedded in the query string by wrapping them in forward-slashes (`"/"`): @@ -82,7 +82,7 @@ wrapping them in forward-slashes (`"/"`): The supported regular expression syntax is explained in <>. [WARNING] -====== +======= The `allow_leading_wildcard` parameter does not have any control over regular expressions. A query string such as the following would force Elasticsearch to visit every term in the index: @@ -90,9 +90,9 @@ Elasticsearch to visit every term in the index: /.*n/ Use with caution! 
-====== +======= -==== Fuzziness +===== Fuzziness We can search for terms that are similar to, but not exactly like our search terms, using the ``fuzzy'' @@ -112,7 +112,7 @@ sufficient to catch 80% of all human misspellings. It can be specified as: quikc~1 -==== Proximity searches +===== Proximity searches While a phrase query (eg `"john smith"`) expects all of the terms in exactly the same order, a proximity query allows the specified words to be further @@ -127,7 +127,7 @@ query string, the more relevant that document is considered to be. When compared to the above example query, the phrase `"quick fox"` would be considered more relevant than `"quick brown fox"`. -==== Ranges +===== Ranges Ranges can be specified for date, numeric or string fields. Inclusive ranges are specified with square brackets `[min TO max]` and exclusive ranges with @@ -168,20 +168,20 @@ Ranges with one side unbounded can use the following syntax: age:<=10 [NOTE] -=================================================================== +==================================================================== To combine an upper and lower bound with the simplified syntax, you would need to join two clauses with an `AND` operator: age:(>=10 AND <20) age:(+>=10 +<20) -=================================================================== +==================================================================== The parsing of ranges in query strings can be complex and error prone. It is much more reliable to use an explicit <>. -==== Boosting +===== Boosting Use the _boost_ operator `^` to make one term more relevant than another. For instance, if we want to find all documents about foxes, but we are @@ -196,7 +196,7 @@ Boosts can also be applied to phrases or to groups: "john smith"^2 (foo bar)^4 -==== Boolean operators +===== Boolean operators By default, all terms are optional, as long as one term matches. 
A search for `foo bar baz` will find any document that contains one or more of @@ -256,7 +256,7 @@ would look like this: **** -==== Grouping +===== Grouping Multiple terms or clauses can be grouped together with parentheses, to form sub-queries: @@ -268,7 +268,7 @@ of a sub-query: status:(active OR pending) title:(full text search)^2 -==== Reserved characters +===== Reserved characters If you need to use any of the characters which function as operators in your query itself (and not as operators), then you should escape them with @@ -290,7 +290,7 @@ index is actually `"wifi"`. Escaping the space will protect it from being touched by the query string parser: `"wi\ fi"`. **** -==== Empty Query +===== Empty Query If the query string is empty or only contains whitespaces the query will yield an empty result set. diff --git a/docs/reference/query-dsl/query_filter_context.asciidoc b/docs/reference/query-dsl/query_filter_context.asciidoc new file mode 100644 index 00000000000..f6c3aa44750 --- /dev/null +++ b/docs/reference/query-dsl/query_filter_context.asciidoc @@ -0,0 +1,77 @@ +[[query-filter-context]] +== Query and filter context + +The behaviour of a query clause depends on whether it is used in _query context_ or +in _filter context_: + +Query context:: ++ +-- +A query clause used in query context answers the question ``__How well does this +document match this query clause?__'' Besides deciding whether or not the +document matches, the query clause also calculates a `_score` representing how +well the document matches, relative to other documents. + +Query context is in effect whenever a query clause is passed to a `query` parameter, +such as the `query` parameter in the <> API. +-- + +Filter context:: ++ +-- +In _filter_ context, a query clause answers the question ``__Does this document +match this query clause?__'' The answer is a simple Yes or No -- no scores are +calculated. Filter context is mostly used for filtering structured data, e.g.
+ +* __Does this +timestamp+ fall into the range 2015 to 2016?__ +* __Is the +status+ field set to ++"published"++__? + +Frequently used filters will be cached automatically by Elasticsearch, to +speed up performance. + +Filter context is in effect whenever a query clause is passed to a `filter` +parameter, such as the `filter` or `must_not` parameters in the +<> query, the `filter` parameter in the +<> query, or the +<> aggregation. +-- + +Below is an example of query clauses being used in query and filter context +in the `search` API. This query will match documents where all of the following +conditions are met: + +* The `title` field contains the word `search`. +* The `content` field contains the word `elasticsearch`. +* The `status` field contains the exact word `published`. +* The `publish_date` field contains a date from 1 Jan 2015 onwards. + +[source,json] +------------------------------------ +GET _search +{ + "query": { <1> + "bool": { <2> + "must": [ + { "match": { "title": "Search" }}, <2> + { "match": { "content": "Elasticsearch" }} <2> + ], + "filter": [ <3> + { "term": { "status": "published" }}, <4> + { "range": { "publish_date": { "gte": "2015-01-01" }}} <4> + ] + } + } +} +------------------------------------ +<1> The `query` parameter indicates query context. +<2> The `bool` and two `match` clauses are used in query context, + which means that they are used to score how well each document + matches. +<3> The `filter` parameter indicates filter context. +<4> The `term` and `range` clauses are used in filter context. + They will filter out documents which do not match, but they will + not affect the score for matching documents. + +TIP: Use query clauses in query context for conditions which should affect the +score of matching documents (i.e. how well does the document match), and use +all other query clauses in filter context. 
diff --git a/docs/reference/query-dsl/range-query.asciidoc b/docs/reference/query-dsl/range-query.asciidoc index ba627129dab..5d5bb6a308d 100644 --- a/docs/reference/query-dsl/range-query.asciidoc +++ b/docs/reference/query-dsl/range-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-range-query]] -== Range Query +=== Range Query Matches documents with fields that have terms within a certain range. The type of the Lucene query depends on the field type, for `string` @@ -30,7 +30,7 @@ The `range` query accepts the following parameters: `boost`:: Sets the boost value of the query, defaults to `1.0` [float] -=== Date options +==== Date options When applied on `date` fields the `range` filter accepts also a `time_zone` parameter. The `time_zone` parameter will be applied to your input lower and upper bounds and will diff --git a/docs/reference/query-dsl/regexp-query.asciidoc b/docs/reference/query-dsl/regexp-query.asciidoc index d0be5973b6c..692caf43480 100644 --- a/docs/reference/query-dsl/regexp-query.asciidoc +++ b/docs/reference/query-dsl/regexp-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-regexp-query]] -== Regexp Query +=== Regexp Query The `regexp` query allows you to use regular expression term queries. See <> for details of the supported regular expression language. diff --git a/docs/reference/query-dsl/regexp-syntax.asciidoc b/docs/reference/query-dsl/regexp-syntax.asciidoc index 51992591ea9..b261d922b98 100644 --- a/docs/reference/query-dsl/regexp-syntax.asciidoc +++ b/docs/reference/query-dsl/regexp-syntax.asciidoc @@ -1,17 +1,17 @@ [[regexp-syntax]] -=== Regular expression syntax +==== Regular expression syntax Regular expression queries are supported by the `regexp` and the `query_string` queries. The Lucene regular expression engine is not Perl-compatible but supports a smaller range of operators. [NOTE] -==== +===== We will not attempt to explain regular expressions, but just explain the supported operators. 
-==== +===== -==== Standard operators +===== Standard operators Anchoring:: + @@ -200,7 +200,7 @@ For string `"abcd"`: -- -===== Optional operators +====== Optional operators These operators are available by default as the `flags` parameter defaults to `ALL`. Different flag combinations (concatened with `"\"`) can be used to enable/disable diff --git a/docs/reference/query-dsl/script-query.asciidoc b/docs/reference/query-dsl/script-query.asciidoc index 4c307f2556f..c14e8142f7d 100644 --- a/docs/reference/query-dsl/script-query.asciidoc +++ b/docs/reference/query-dsl/script-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-script-query]] -== Script Query +=== Script Query A query allowing to define <> as filters. For @@ -20,7 +20,7 @@ example: ---------------------------------------------- [float] -=== Custom Parameters +==== Custom Parameters Scripts are compiled and cached for faster execution. If the same script can be used, just with different parameters provider, it is preferable diff --git a/docs/reference/query-dsl/simple-query-string-query.asciidoc b/docs/reference/query-dsl/simple-query-string-query.asciidoc index 338ee094fc0..092918b59f0 100644 --- a/docs/reference/query-dsl/simple-query-string-query.asciidoc +++ b/docs/reference/query-dsl/simple-query-string-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-simple-query-string-query]] -== Simple Query String Query +=== Simple Query String Query A query that uses the SimpleQueryParser to parse its context. Unlike the regular `query_string` query, the `simple_query_string` query will never @@ -57,7 +57,7 @@ Defaults to `ROOT`. |======================================================================= [float] -==== Simple Query String Syntax +===== Simple Query String Syntax The `simple_query_string` supports the following special characters: * `+` signifies AND operation @@ -73,7 +73,7 @@ In order to search for any of these special characters, they will need to be escaped with `\`. 
[float] -=== Default Field +==== Default Field When not explicitly specifying the field to search on in the query string syntax, the `index.query.default_field` will be used to derive which field to search on. It defaults to `_all` field. @@ -82,7 +82,7 @@ So, if `_all` field is disabled, it might make sense to change it to set a different default field. [float] -=== Multi Field +==== Multi Field The fields parameter can also include pattern based field names, allowing to automatically expand to the relevant fields (dynamically introduced fields included). For example: @@ -98,7 +98,7 @@ introduced fields included). For example: -------------------------------------------------- [float] -=== Flags +==== Flags `simple_query_string` support multiple flags to specify which parsing features should be enabled. It is specified as a `|`-delimited string with the `flags` parameter: diff --git a/docs/reference/query-dsl/span-containing-query.asciidoc b/docs/reference/query-dsl/span-containing-query.asciidoc index 3a214858e06..965bf855b6f 100644 --- a/docs/reference/query-dsl/span-containing-query.asciidoc +++ b/docs/reference/query-dsl/span-containing-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-containing-query]] -== Span Containing Query +=== Span Containing Query Returns matches which enclose another span query. The span containing query maps to Lucene `SpanContainingQuery`. Here is an example: diff --git a/docs/reference/query-dsl/span-first-query.asciidoc b/docs/reference/query-dsl/span-first-query.asciidoc index b01edea691d..74fe7ff88ba 100644 --- a/docs/reference/query-dsl/span-first-query.asciidoc +++ b/docs/reference/query-dsl/span-first-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-first-query]] -== Span First Query +=== Span First Query Matches spans near the beginning of a field. The span first query maps to Lucene `SpanFirstQuery`. 
Here is an example: diff --git a/docs/reference/query-dsl/span-multi-term-query.asciidoc b/docs/reference/query-dsl/span-multi-term-query.asciidoc index dfb75bcce61..af3da5cf7dd 100644 --- a/docs/reference/query-dsl/span-multi-term-query.asciidoc +++ b/docs/reference/query-dsl/span-multi-term-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-multi-term-query]] -== Span Multi Term Query +=== Span Multi Term Query The `span_multi` query allows you to wrap a `multi term query` (one of wildcard, fuzzy, prefix, term, range or regexp query) as a `span query`, so diff --git a/docs/reference/query-dsl/span-near-query.asciidoc b/docs/reference/query-dsl/span-near-query.asciidoc index 2e905fc8870..39982e2ba22 100644 --- a/docs/reference/query-dsl/span-near-query.asciidoc +++ b/docs/reference/query-dsl/span-near-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-near-query]] -== Span Near Query +=== Span Near Query Matches spans which are near one another. One can specify _slop_, the maximum number of intervening unmatched positions, as well as whether diff --git a/docs/reference/query-dsl/span-not-query.asciidoc b/docs/reference/query-dsl/span-not-query.asciidoc index a23877f1a8b..73186985b0d 100644 --- a/docs/reference/query-dsl/span-not-query.asciidoc +++ b/docs/reference/query-dsl/span-not-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-not-query]] -== Span Not Query +=== Span Not Query Removes matches which overlap with another span query. The span not query maps to Lucene `SpanNotQuery`. Here is an example: diff --git a/docs/reference/query-dsl/span-or-query.asciidoc b/docs/reference/query-dsl/span-or-query.asciidoc index 10a168e3ac6..72a4ce8724b 100644 --- a/docs/reference/query-dsl/span-or-query.asciidoc +++ b/docs/reference/query-dsl/span-or-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-or-query]] -== Span Or Query +=== Span Or Query Matches the union of its span clauses. The span or query maps to Lucene `SpanOrQuery`. 
Here is an example: diff --git a/docs/reference/query-dsl/span-queries.asciidoc b/docs/reference/query-dsl/span-queries.asciidoc new file mode 100644 index 00000000000..63aad48d987 --- /dev/null +++ b/docs/reference/query-dsl/span-queries.asciidoc @@ -0,0 +1,65 @@ +[[span-queries]] +== Span queries + +Span queries are low-level positional queries which provide expert control +over the order and proximity of the specified terms. These are typically used +to implement very specific queries on legal documents or patents. + +Span queries cannot be mixed with non-span queries (with the exception of the `span_multi` query). + +The queries in this group are: + +<>:: + +The equivalent of the <> but for use with +other span queries. + +<>:: + +Wraps a <>, <>, +<>, <>, +<>, or <> query. + +<>:: + +Accepts another span query whose matches must appear within the first N +positions of the field. + +<>:: + +Accepts multiple span queries whose matches must be within the specified distance of each other, and possibly in the same order. + +<>:: + +Combines multiple span queries -- returns documents which match any of the +specified queries. + +<>:: + +Wraps another span query, and excludes any documents which match that query. + +<>:: + +Accepts a list of span queries, but only returns those spans which also match a second span query. + +<>:: + +The result from a single span query is returned as long as its span falls +within the spans returned by a list of other span queries. 
+ + +include::span-term-query.asciidoc[] + +include::span-multi-term-query.asciidoc[] + +include::span-first-query.asciidoc[] + +include::span-near-query.asciidoc[] + +include::span-or-query.asciidoc[] + +include::span-not-query.asciidoc[] + +include::span-containing-query.asciidoc[] + +include::span-within-query.asciidoc[] diff --git a/docs/reference/query-dsl/span-term-query.asciidoc b/docs/reference/query-dsl/span-term-query.asciidoc index 3efe4387efc..9de86d48684 100644 --- a/docs/reference/query-dsl/span-term-query.asciidoc +++ b/docs/reference/query-dsl/span-term-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-term-query]] -== Span Term Query +=== Span Term Query Matches spans containing a term. The span term query maps to Lucene `SpanTermQuery`. Here is an example: diff --git a/docs/reference/query-dsl/span-within-query.asciidoc b/docs/reference/query-dsl/span-within-query.asciidoc index 24510ac023c..dc5c4bbfdfd 100644 --- a/docs/reference/query-dsl/span-within-query.asciidoc +++ b/docs/reference/query-dsl/span-within-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-span-within-query]] -== Span Within Query +=== Span Within Query Returns matches which are enclosed inside another span query. The span within query maps to Lucene `SpanWithinQuery`. Here is an example: diff --git a/docs/reference/query-dsl/special-queries.asciidoc b/docs/reference/query-dsl/special-queries.asciidoc new file mode 100644 index 00000000000..1a2d63d2265 --- /dev/null +++ b/docs/reference/query-dsl/special-queries.asciidoc @@ -0,0 +1,29 @@ +[[specialized-queries]] + +== Specialized queries + +This group contains queries which do not fit into the other groups: + +<>:: + +This query finds documents which are similar to the specified text, document, +or collection of documents. + +<>:: + +The `template` query accepts a Mustache template (either inline, indexed, or +from a file), and a map of parameters, and combines the two to generate the +final query to execute. 
+ +<>:: + +This query allows a script to act as a filter. Also see the +<>. + + +include::mlt-query.asciidoc[] + +include::template-query.asciidoc[] + +include::script-query.asciidoc[] + diff --git a/docs/reference/query-dsl/template-query.asciidoc b/docs/reference/query-dsl/template-query.asciidoc index 31728fe9993..14a439bb3ee 100644 --- a/docs/reference/query-dsl/template-query.asciidoc +++ b/docs/reference/query-dsl/template-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-template-query]] -== Template Query +=== Template Query A query that accepts a query template and a map of key/value pairs to fill in template parameters. Templating is based on Mustache. For simple token substitution all you provide @@ -56,7 +56,7 @@ GET /_search <1> New line characters (`\n`) should be escaped as `\\n` or removed, and quotes (`"`) should be escaped as `\\"`. -=== Stored templates +==== Stored templates You can register a template by storing it in the `config/scripts` directory, in a file using the `.mustache` extension. In order to execute the stored template, reference it by name in the `file` diff --git a/docs/reference/query-dsl/term-level-queries.asciidoc b/docs/reference/query-dsl/term-level-queries.asciidoc new file mode 100644 index 00000000000..7e9f5e5ca3e --- /dev/null +++ b/docs/reference/query-dsl/term-level-queries.asciidoc @@ -0,0 +1,93 @@ +[[term-level-queries]] +== Term level queries + +While the <> will analyze the query +string before executing, the _term-level queries_ operate on the exact terms +that are stored in the inverted index. + +These queries are usually used for structured data like numbers, dates, and +enums, rather than full text fields. Alternatively, they allow you to craft +low-level queries, foregoing the analysis process. + +The queries in this group are: + +<>:: + + Find documents which contain the exact term specified in the field + specified. + +<>:: + + Find documents which contain any of the exact terms specified in the field + specified. 
+ +<>:: + + Find documents where the field specified contains values (dates, numbers, + or strings) in the range specified. + +<>:: + + Find documents where the field specified contains any non-null value. + +<>:: + + Find documents where the field specified is missing or contains only + `null` values. + +<>:: + + Find documents where the field specified contains terms which begin with + the exact prefix specified. + +<>:: + + Find documents where the field specified contains terms which match the + pattern specified, where the pattern supports single character wildcards + (`?`) and multi-character wildcards (`*`). + +<>:: + + Find documents where the field specified contains terms which match the + <> specified. + +<>:: + + Find documents where the field specified contains terms which are fuzzily + similar to the specified term. Fuzziness is measured as a + http://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance[Levenshtein edit distance] + of 1 or 2. + +<>:: + + Find documents of the specified type. + +<>:: + + Find documents with the specified type and IDs. + + +include::term-query.asciidoc[] + +include::terms-query.asciidoc[] + +include::range-query.asciidoc[] + +include::exists-query.asciidoc[] + +include::missing-query.asciidoc[] + +include::prefix-query.asciidoc[] + +include::wildcard-query.asciidoc[] + +include::regexp-query.asciidoc[] + +include::fuzzy-query.asciidoc[] + +include::type-query.asciidoc[] + +include::ids-query.asciidoc[] + + + diff --git a/docs/reference/query-dsl/term-query.asciidoc b/docs/reference/query-dsl/term-query.asciidoc index ed81870bfdd..a34ae5b0a68 100644 --- a/docs/reference/query-dsl/term-query.asciidoc +++ b/docs/reference/query-dsl/term-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-term-query]] -== Term Query +=== Term Query The `term` query finds documents that contain the *exact* term specified in the inverted index. 
For instance: diff --git a/docs/reference/query-dsl/terms-query.asciidoc b/docs/reference/query-dsl/terms-query.asciidoc index 52092f17655..58b0ba5d85b 100644 --- a/docs/reference/query-dsl/terms-query.asciidoc +++ b/docs/reference/query-dsl/terms-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-terms-query]] -== Terms Query +=== Terms Query Filters documents that have fields that match any of the provided terms (*not analyzed*). For example: @@ -19,7 +19,8 @@ The `terms` query is also aliased with `in` as the filter name for simpler usage. [float] -==== Terms lookup mechanism +[[query-dsl-terms-lookup]] +===== Terms lookup mechanism When it's needed to specify a `terms` filter with a lot of terms it can be beneficial to fetch those term values from a document in an index. A @@ -31,21 +32,21 @@ lookup mechanism. The terms lookup mechanism supports the following options: [horizontal] -`index`:: +`index`:: The index to fetch the term values from. Defaults to the current index. -`type`:: +`type`:: The type to fetch the term values from. -`id`:: +`id`:: The id of the document to fetch the term values from. -`path`:: +`path`:: The field specified as path to fetch the actual values for the `terms` filter. -`routing`:: +`routing`:: A custom routing value to be used when retrieving the external terms doc. @@ -61,7 +62,7 @@ terms filter will prefer to execute the get request on a local node if possible, reducing the need for networking. [float] -==== Terms lookup twitter example +===== Terms lookup twitter example [source,js] -------------------------------------------------- diff --git a/docs/reference/query-dsl/type-query.asciidoc b/docs/reference/query-dsl/type-query.asciidoc index 3aa932e5670..d3ef1a30fb1 100644 --- a/docs/reference/query-dsl/type-query.asciidoc +++ b/docs/reference/query-dsl/type-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-type-query]] -== Type Query +=== Type Query Filters documents matching the provided document / mapping type. 
diff --git a/docs/reference/query-dsl/wildcard-query.asciidoc b/docs/reference/query-dsl/wildcard-query.asciidoc index 9d4bc759f3d..d72dbec2481 100644 --- a/docs/reference/query-dsl/wildcard-query.asciidoc +++ b/docs/reference/query-dsl/wildcard-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-wildcard-query]] -== Wildcard Query +=== Wildcard Query Matches documents that have fields matching a wildcard expression (*not analyzed*). Supported wildcards are `*`, which matches any character From a522ddb4acf712dd3d9104ea48c8844fcb06785b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Thu, 4 Jun 2015 12:29:17 +0200 Subject: [PATCH 14/16] [TEST] Add more logging to allocation decision --- .../java/org/elasticsearch/cluster/MinimumMasterNodesTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesTests.java b/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesTests.java index 68d682ddf7b..2034dc41b81 100644 --- a/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesTests.java +++ b/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.junit.Test; import java.util.concurrent.ExecutionException; @@ -163,6 +164,7 @@ public class MinimumMasterNodesTests extends ElasticsearchIntegrationTest { } @Test @Slow + @TestLogging("cluster.routing.allocation.allocator:TRACE") public void multipleNodesShutdownNonMasterNodes() throws Exception { Settings settings = settingsBuilder() .put("discovery.type", "zen") From f85a17ff1a01c4494a15a5337b1ddd083d44b34b Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 4 Jun 2015 13:16:32 +0200 Subject: [PATCH 15/16] 
Docs: Fixed heading level for in query DSL docs --- .../query-dsl/function-score-query.asciidoc | 24 +++++++++---------- .../query-dsl/geo-shape-query.asciidoc | 10 ++++---- docs/reference/query-dsl/match-query.asciidoc | 2 +- .../query-dsl/regexp-syntax.asciidoc | 2 +- 4 files changed, 17 insertions(+), 21 deletions(-) diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index 5d1ee98f83a..5bf501a8dc9 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -89,11 +89,9 @@ query. The parameter `boost_mode` defines how: By default, modifying the score does not change which documents match. To exclude documents that do not meet a certain score threshold the `min_score` parameter can be set to the desired score threshold. -==== Score functions - The `function_score` query provides several types of score functions. -====== Script score +==== Script score The `script_score` function allows you to wrap another query and customize the scoring of it optionally with a computation derived from other numeric @@ -133,7 +131,7 @@ Note that unlike the `custom_score` query, the score of the query is multiplied with the result of the script scoring. If you wish to inhibit this, set `"boost_mode": "replace"` -====== Weight +==== Weight The `weight` score allows you to multiply the score by the provided `weight`. This can sometimes be desired since boost value set on @@ -145,7 +143,7 @@ not. "weight" : number -------------------------------------------------- -====== Random +==== Random The `random_score` generates scores using a hash of the `_uid` field, with a `seed` for variation. If `seed` is not specified, the current @@ -161,7 +159,7 @@ be a memory intensive operation since the values are unique. 
} -------------------------------------------------- -====== Field Value factor +==== Field Value factor The `field_value_factor` function allows you to use a field from a document to influence the score. It's similar to using the `script_score` function, however, @@ -205,7 +203,7 @@ is an illegal operation, and an exception will be thrown. Be sure to limit the values of the field with a range filter to avoid this, or use `log1p` and `ln1p`. -====== Decay functions +==== Decay functions Decay functions score a document with a function that decays depending on the distance of a numeric field value of the document from a user @@ -332,7 +330,7 @@ For single functions the three decay functions together with their parameters ca image:images/decay_2d.png[width=600] -====== Multiple values: +===== Multiple values: If a field used for computing the decay contains multiple values, per default the value closest to the origin is chosen for determining the distance. This can be changed by setting `multi_value_mode`. @@ -448,7 +446,7 @@ curl 'localhost:9200/hotels/_search/' -d '{ Next, we show how the computed score looks like for each of the three possible decay functions. -====== Normal decay, keyword `gauss` +===== Normal decay, keyword `gauss` When choosing `gauss` as the decay function in the above example, the contour and surface plot of the multiplier looks like this: @@ -469,7 +467,7 @@ of 0.56. "BnB Bellevue" and "Backback Nap" are both pretty close to the defined location but "BnB Bellevue" is cheaper, so it gets a multiplier of 0.86 whereas "Backpack Nap" gets a value of 0.66. 
-====== Exponential decay, keyword `exp` +===== Exponential decay, keyword `exp` When choosing `exp` as the decay function in the above example, the contour and surface plot of the multiplier looks like this: @@ -478,7 +476,7 @@ image::https://f.cloud.github.com/assets/4320215/768161/082975c0-e899-11e2-86f7- image::https://f.cloud.github.com/assets/4320215/768162/0b606884-e899-11e2-907b-aefc77eefef6.png[width="700px"] -====== Linear' decay, keyword `linear` +===== Linear decay, keyword `linear` When choosing `linear` as the decay function in the above example, the contour and surface plot of the multiplier looks like this: @@ -487,12 +485,12 @@ image::https://f.cloud.github.com/assets/4320215/768164/1775b0ca-e899-11e2-9f4a- image::https://f.cloud.github.com/assets/4320215/768165/19d8b1aa-e899-11e2-91bc-6b0553e8d722.png[width="700px"] -===== Supported fields for decay functions +==== Supported fields for decay functions Only single valued numeric fields, including time and geo locations, are supported. -===== What if a field is missing? +==== What if a field is missing? If the numeric field is missing in the document, the function will return 1. diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index 94c085c0902..7a11677f0b1 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -1,5 +1,5 @@ [[query-dsl-geo-shape-query]] -=== GeoShape Filter +=== GeoShape Query Filter documents indexed using the `geo_shape` type. @@ -10,14 +10,12 @@ geo_shape mapping to find documents that have a shape that intersects with the query shape. It will also use the same PrefixTree configuration as defined for the field mapping. 
-==== Filter Format - -The Filter supports two ways of defining the Filter shape, either by +The query supports two ways of defining the query shape, either by providing a whole shape definition, or by referencing the name of a shape pre-indexed in another index. Both formats are defined below with examples. -====== Provided Shape Definition +==== Inline Shape Definition Similar to the `geo_shape` type, the `geo_shape` Filter uses http://www.geojson.org[GeoJSON] to represent shapes. @@ -61,7 +59,7 @@ The following query will find the point using the Elasticsearch's } -------------------------------------------------- -===== Pre-Indexed Shape +==== Pre-Indexed Shape The Filter also supports using a shape which has already been indexed in another index and/or index type. This is particularly useful for when diff --git a/docs/reference/query-dsl/match-query.asciidoc b/docs/reference/query-dsl/match-query.asciidoc index e06f2e92a5a..26029e0bb17 100644 --- a/docs/reference/query-dsl/match-query.asciidoc +++ b/docs/reference/query-dsl/match-query.asciidoc @@ -38,7 +38,7 @@ data-type mismatches, such as trying to query a numeric field with a text query string. Defaults to `false`. [[query-dsl-match-query-fuzziness]] -====== Fuzziness +===== Fuzziness `fuzziness` allows _fuzzy matching_ based on the type of field being queried. See <> for allowed settings. diff --git a/docs/reference/query-dsl/regexp-syntax.asciidoc b/docs/reference/query-dsl/regexp-syntax.asciidoc index b261d922b98..e57d0e1c779 100644 --- a/docs/reference/query-dsl/regexp-syntax.asciidoc +++ b/docs/reference/query-dsl/regexp-syntax.asciidoc @@ -200,7 +200,7 @@ For string `"abcd"`: -- -====== Optional operators +===== Optional operators These operators are available by default as the `flags` parameter defaults to `ALL`. 
Different flag combinations (concatened with `"\"`) can be used to enable/disable From 39a20c3b5b68578a0c9b93d8bbd62a8c81e227cf Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Wed, 3 Jun 2015 13:46:56 +0100 Subject: [PATCH 16/16] Aggregations: Allow aggregation_binary to build and parse Previously AggregationBuilder would wrap binary_aggregations in an aggregations object which would break parsing. This has been fixed so that for normally specified aggregations there are wrapped in an `aggregations` object, for binary aggregation which have the same XContentType as the builder it will use an `aggregations` field name and use the aggregationsBinary as the value (this will render the same as normal aggregations), and for binary aggregation with a different ContentType from the builder we use an `aggregations_binary` field name and add the aggregationsBinary as a binary value. Additionally the logic in AggregationParsers needed to be changed as it previously did not parse `aggregations_binary` fields in sub-aggregations. A check has been added for the `aggregations_binary` field name and the binaryValue of this field is used to create a new parser and create the correct AggregatorFactories. 
Close #11457 --- .../aggregations/AggregationBuilder.java | 4 +- .../aggregations/AggregatorParsers.java | 58 ++++--- .../aggregations/AggregationsBinaryTests.java | 142 ++++++++++++++++++ 3 files changed, 184 insertions(+), 20 deletions(-) create mode 100644 src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryTests.java diff --git a/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java index d41daa7363f..b94f657de94 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java +++ b/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java @@ -122,12 +122,13 @@ public abstract class AggregationBuilder> extend internalXContent(builder, params); if (aggregations != null || aggregationsBinary != null) { - builder.startObject("aggregations"); if (aggregations != null) { + builder.startObject("aggregations"); for (AbstractAggregationBuilder subAgg : aggregations) { subAgg.toXContent(builder, params); } + builder.endObject(); } if (aggregationsBinary != null) { @@ -138,7 +139,6 @@ public abstract class AggregationBuilder> extend } } - builder.endObject(); } return builder.endObject(); diff --git a/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java b/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java index d299b099b9d..105f46187de 100644 --- a/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java +++ b/src/main/java/org/elasticsearch/search/aggregations/AggregatorParsers.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableMap; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchParseException; import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -140,45 +141,66 @@ public class AggregatorParsers { final String fieldName = parser.currentName(); token = parser.nextToken(); - if (token != XContentParser.Token.START_OBJECT) { - throw new SearchParseException(context, "Expected [" + XContentParser.Token.START_OBJECT + "] under [" + fieldName - + "], but got a [" + token + "] in [" + aggregationName + "]", parser.getTokenLocation()); - } - - switch (fieldName) { + if ("aggregations_binary".equals(fieldName)) { + if (subFactories != null) { + throw new SearchParseException(context, "Found two sub aggregation definitions under [" + aggregationName + "]", + parser.getTokenLocation()); + } + XContentParser binaryParser = null; + if (token == XContentParser.Token.VALUE_STRING || token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { + byte[] source = parser.binaryValue(); + binaryParser = XContentFactory.xContent(source).createParser(source); + } else { + throw new SearchParseException(context, "Expected [" + XContentParser.Token.VALUE_STRING + " or " + + XContentParser.Token.VALUE_EMBEDDED_OBJECT + "] for [" + fieldName + "], but got a [" + token + "] in [" + + aggregationName + "]", parser.getTokenLocation()); + } + XContentParser.Token binaryToken = binaryParser.nextToken(); + if (binaryToken != XContentParser.Token.START_OBJECT) { + throw new SearchParseException(context, "Expected [" + XContentParser.Token.START_OBJECT + + "] as first token when parsing [" + fieldName + "], but got a [" + binaryToken + "] in [" + + aggregationName + "]", parser.getTokenLocation()); + } + subFactories = parseAggregators(binaryParser, context, level + 1); + } else if (token == XContentParser.Token.START_OBJECT) { + switch (fieldName) { case "meta": metaData = parser.map(); break; case "aggregations": case "aggs": if (subFactories != null) { - throw new SearchParseException(context, "Found two sub aggregation definitions under [" + aggregationName + "]", - 
parser.getTokenLocation()); + throw new SearchParseException(context, + "Found two sub aggregation definitions under [" + aggregationName + "]", parser.getTokenLocation()); } - subFactories = parseAggregators(parser, context, level+1); + subFactories = parseAggregators(parser, context, level + 1); break; default: if (aggFactory != null) { - throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: [" - + aggFactory.type + "] and [" + fieldName + "]", parser.getTokenLocation()); + throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + + "]: [" + aggFactory.type + "] and [" + fieldName + "]", parser.getTokenLocation()); } - if (pipelineAggregatorFactory != null) { - throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + "]: [" - + pipelineAggregatorFactory + "] and [" + fieldName + "]", parser.getTokenLocation()); + if (pipelineAggregatorFactory != null) { + throw new SearchParseException(context, "Found two aggregation type definitions in [" + aggregationName + + "]: [" + pipelineAggregatorFactory + "] and [" + fieldName + "]", parser.getTokenLocation()); } Aggregator.Parser aggregatorParser = parser(fieldName); if (aggregatorParser == null) { - PipelineAggregator.Parser pipelineAggregatorParser = pipelineAggregator(fieldName); - if (pipelineAggregatorParser == null) { + PipelineAggregator.Parser pipelineAggregatorParser = pipelineAggregator(fieldName); + if (pipelineAggregatorParser == null) { throw new SearchParseException(context, "Could not find aggregator type [" + fieldName + "] in [" - + aggregationName + "]", parser.getTokenLocation()); + + aggregationName + "]", parser.getTokenLocation()); } else { - pipelineAggregatorFactory = pipelineAggregatorParser.parse(aggregationName, parser, context); + pipelineAggregatorFactory = pipelineAggregatorParser.parse(aggregationName, parser, context); } } else { 
aggFactory = aggregatorParser.parse(aggregationName, parser, context); } + } + } else { + throw new SearchParseException(context, "Expected [" + XContentParser.Token.START_OBJECT + "] under [" + fieldName + + "], but got a [" + token + "] in [" + aggregationName + "]", parser.getTokenLocation()); } } diff --git a/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryTests.java b/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryTests.java new file mode 100644 index 00000000000..2e27c683046 --- /dev/null +++ b/src/test/java/org/elasticsearch/search/aggregations/AggregationsBinaryTests.java @@ -0,0 +1,142 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations; + +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Requests; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; +import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder; +import org.elasticsearch.test.ElasticsearchIntegrationTest; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.core.IsNull.notNullValue; + +@ElasticsearchIntegrationTest.SuiteScopeTest +public class AggregationsBinaryTests extends ElasticsearchIntegrationTest { + + private static final String STRING_FIELD_NAME = "s_value"; + private static final String INT_FIELD_NAME = "i_value"; + + @Override + public void setupSuiteScopeCluster() throws Exception { + createIndex("idx"); + List builders = new ArrayList<>(); + for (int i = 0; i < 5; i++) { + builders.add(client().prepareIndex("idx", "type").setSource( + jsonBuilder().startObject().field(STRING_FIELD_NAME, "val" + i).field(INT_FIELD_NAME, i).endObject())); + } + indexRandom(true, builders); + ensureSearchable(); + } + + @Test + public void testAggregationsBinary() throws Exception { + TermsBuilder termsBuilder = AggregationBuilders.terms("terms").field(STRING_FIELD_NAME); + TermsBuilder subTerm = AggregationBuilders.terms("subterms").field(INT_FIELD_NAME); + + // Create an XContentBuilder from sub 
aggregation + XContentBuilder subTermContentBuilder = JsonXContent.contentBuilder().startObject(); + subTerm.toXContent(subTermContentBuilder, ToXContent.EMPTY_PARAMS); + subTermContentBuilder.endObject(); + + // Add sub aggregation as a XContentBuilder (binary_aggregation) + termsBuilder.subAggregation(subTermContentBuilder); + + SearchResponse response = client().prepareSearch("idx").setTypes("type").addAggregation(termsBuilder).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(5)); + + for (int i = 0; i < 5; i++) { + Terms.Bucket bucket = terms.getBucketByKey("val" + i); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo("val" + i)); + assertThat(bucket.getDocCount(), equalTo(1l)); + Aggregations subAggs = bucket.getAggregations(); + assertThat(subAggs, notNullValue()); + assertThat(subAggs.asList().size(), equalTo(1)); + Terms subTerms = subAggs.get("subterms"); + assertThat(subTerms, notNullValue()); + List subTermsBuckets = subTerms.getBuckets(); + assertThat(subTermsBuckets, notNullValue()); + assertThat(subTermsBuckets.size(), equalTo(1)); + assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i)); + assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l)); + } + } + + @Test + public void testAggregationsBinarySameContentType() throws Exception { + TermsBuilder termsBuilder = AggregationBuilders.terms("terms").field(STRING_FIELD_NAME); + TermsBuilder subTerm = AggregationBuilders.terms("subterms").field(INT_FIELD_NAME); + + // Create an XContentBuilder from sub aggregation + + XContentBuilder subTermContentBuilder = XContentFactory.contentBuilder(Requests.CONTENT_TYPE); + subTermContentBuilder.startObject(); + subTerm.toXContent(subTermContentBuilder, ToXContent.EMPTY_PARAMS); + 
subTermContentBuilder.endObject(); + + // Add sub aggregation as a XContentBuilder (binary_aggregation) + termsBuilder.subAggregation(subTermContentBuilder); + + SearchResponse response = client().prepareSearch("idx").setTypes("type").addAggregation(termsBuilder).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(5)); + + for (int i = 0; i < 5; i++) { + Terms.Bucket bucket = terms.getBucketByKey("val" + i); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKeyAsString(), equalTo("val" + i)); + assertThat(bucket.getDocCount(), equalTo(1l)); + Aggregations subAggs = bucket.getAggregations(); + assertThat(subAggs, notNullValue()); + assertThat(subAggs.asList().size(), equalTo(1)); + Terms subTerms = subAggs.get("subterms"); + assertThat(subTerms, notNullValue()); + List subTermsBuckets = subTerms.getBuckets(); + assertThat(subTermsBuckets, notNullValue()); + assertThat(subTermsBuckets.size(), equalTo(1)); + assertThat(((Number) subTermsBuckets.get(0).getKey()).intValue(), equalTo(i)); + assertThat(subTermsBuckets.get(0).getDocCount(), equalTo(1l)); + } + } +}