Merge pull request #11812 from rjernst/pr/8871

Restrict fields with the same name in different types to have the same core settings
This commit is contained in:
Ryan Ernst 2015-06-24 08:07:18 -07:00
commit b4ac0b2312
104 changed files with 1625 additions and 1390 deletions

View File

@ -41,6 +41,7 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
private final TransportMessage originalMessage;
private final String cause;
private final String index;
private final boolean updateAllTypes;
private IndexMetaData.State state = IndexMetaData.State.OPEN;
@ -55,10 +56,11 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
private final Set<ClusterBlock> blocks = Sets.newHashSet();
CreateIndexClusterStateUpdateRequest(TransportMessage originalMessage, String cause, String index) {
CreateIndexClusterStateUpdateRequest(TransportMessage originalMessage, String cause, String index, boolean updateAllTypes) {
this.originalMessage = originalMessage;
this.cause = cause;
this.index = index;
this.updateAllTypes = updateAllTypes;
}
public CreateIndexClusterStateUpdateRequest settings(Settings settings) {
@ -126,4 +128,9 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
public Set<ClusterBlock> blocks() {
return blocks;
}
/** True if all fields that span multiple types should be updated, false otherwise */
public boolean updateAllTypes() {
return updateAllTypes;
}
}

View File

@ -72,6 +72,8 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
private final Map<String, IndexMetaData.Custom> customs = newHashMap();
private boolean updateAllTypes = false;
CreateIndexRequest() {
}
@ -433,6 +435,17 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
return this.customs;
}
/** True if all fields that span multiple types should be updated, false otherwise */
public boolean updateAllTypes() {
return updateAllTypes;
}
/** See {@link #updateAllTypes()} */
public CreateIndexRequest updateAllTypes(boolean updateAllTypes) {
this.updateAllTypes = updateAllTypes;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
@ -454,6 +467,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
for (int i = 0; i < aliasesSize; i++) {
aliases.add(Alias.read(in));
}
updateAllTypes = in.readBoolean();
}
@Override
@ -477,5 +491,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
for (Alias alias : aliases) {
alias.writeTo(out);
}
out.writeBoolean(updateAllTypes);
}
}

View File

@ -243,4 +243,10 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<Create
request.source(source);
return this;
}
/** True if all fields that span multiple types should be updated, false otherwise */
public CreateIndexRequestBuilder setUpdateAllTypes(boolean updateAllTypes) {
request.updateAllTypes(updateAllTypes);
return this;
}
}

View File

@ -71,7 +71,7 @@ public class TransportCreateIndexAction extends TransportMasterNodeAction<Create
cause = "api";
}
final CreateIndexClusterStateUpdateRequest updateRequest = new CreateIndexClusterStateUpdateRequest(request, cause, request.index())
final CreateIndexClusterStateUpdateRequest updateRequest = new CreateIndexClusterStateUpdateRequest(request, cause, request.index(), request.updateAllTypes())
.ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
.settings(request.settings()).mappings(request.mappings())
.aliases(request.aliases()).customs(request.customs());

View File

@ -30,6 +30,8 @@ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpda
private String source;
private boolean updateAllTypes = false;
PutMappingClusterStateUpdateRequest() {
}
@ -51,4 +53,13 @@ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpda
this.source = source;
return this;
}
public boolean updateAllTypes() {
return updateAllTypes;
}
public PutMappingClusterStateUpdateRequest updateAllTypes(boolean updateAllTypes) {
this.updateAllTypes = updateAllTypes;
return this;
}
}

View File

@ -63,6 +63,8 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
private String source;
private boolean updateAllTypes = false;
PutMappingRequest() {
}
@ -236,6 +238,17 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
return this;
}
/** True if all fields that span multiple types should be updated, false otherwise */
public boolean updateAllTypes() {
return updateAllTypes;
}
/** See {@link #updateAllTypes()} */
public PutMappingRequest updateAllTypes(boolean updateAllTypes) {
this.updateAllTypes = updateAllTypes;
return this;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
@ -243,6 +256,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
indicesOptions = IndicesOptions.readIndicesOptions(in);
type = in.readOptionalString();
source = in.readString();
updateAllTypes = in.readBoolean();
readTimeout(in);
}
@ -253,6 +267,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
indicesOptions.writeIndicesOptions(out);
out.writeOptionalString(type);
out.writeString(source);
out.writeBoolean(updateAllTypes);
writeTimeout(out);
}
}

View File

@ -91,4 +91,10 @@ public class PutMappingRequestBuilder extends AcknowledgedRequestBuilder<PutMapp
return this;
}
/** True if all fields that span multiple types should be updated, false otherwise */
public PutMappingRequestBuilder setUpdateAllTypes(boolean updateAllTypes) {
request.updateAllTypes(updateAllTypes);
return this;
}
}

View File

@ -69,6 +69,7 @@ public class TransportPutMappingAction extends TransportMasterNodeAction<PutMapp
PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest()
.ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
.indices(concreteIndices).type(request.type())
.updateAllTypes(request.updateAllTypes())
.source(request.source());
metaDataMappingService.putMapping(updateRequest, new ActionListener<ClusterStateUpdateResponse>() {

View File

@ -347,7 +347,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
// first, add the default mapping
if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
try {
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false);
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false, request.updateAllTypes());
} catch (Exception e) {
removalReason = "failed on parsing default mapping on index creation";
throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e);
@ -359,7 +359,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
}
try {
// apply the default here, its the first time we parse it
mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true);
mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true, request.updateAllTypes());
} catch (Exception e) {
removalReason = "failed on parsing mappings on index creation";
throw new MapperParsingException("mapping [" + entry.getKey() + "]", e);

View File

@ -101,11 +101,11 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
try {
indexService = indicesService.createIndex(indexMetaData.index(), indexMetaData.settings(), clusterService.localNode().id());
if (indexMetaData.mappings().containsKey(MapperService.DEFAULT_MAPPING)) {
indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false);
indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false, false);
}
for (ObjectCursor<MappingMetaData> cursor : indexMetaData.mappings().values()) {
MappingMetaData mappingMetaData = cursor.value;
indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), false);
indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), false, false);
}
} catch (Exception e) {
logger.warn("[{}] failed to temporary create in order to apply alias action", e, indexMetaData.index());

View File

@ -193,7 +193,7 @@ public class MetaDataMappingService extends AbstractComponent {
// only add the current relevant mapping (if exists)
if (indexMetaData.mappings().containsKey(type)) {
// don't apply the default mapping, it has been applied when the mapping was created
indexService.mapperService().merge(type, indexMetaData.mappings().get(type).source(), false);
indexService.mapperService().merge(type, indexMetaData.mappings().get(type).source(), false, true);
}
}
}
@ -264,7 +264,7 @@ public class MetaDataMappingService extends AbstractComponent {
continue;
}
DocumentMapper updatedMapper = indexService.mapperService().merge(type, mappingSource, false);
DocumentMapper updatedMapper = indexService.mapperService().merge(type, mappingSource, false, true);
processedRefreshes.add(type);
// if we end up with the same mapping as the original once, ignore
@ -361,11 +361,11 @@ public class MetaDataMappingService extends AbstractComponent {
indicesToClose.add(indexMetaData.index());
// make sure to add custom default mapping if exists
if (indexMetaData.mappings().containsKey(MapperService.DEFAULT_MAPPING)) {
indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false);
indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false, request.updateAllTypes());
}
// only add the current relevant mapping (if exists)
if (indexMetaData.mappings().containsKey(request.type())) {
indexService.mapperService().merge(request.type(), indexMetaData.mappings().get(request.type()).source(), false);
indexService.mapperService().merge(request.type(), indexMetaData.mappings().get(request.type()).source(), false, request.updateAllTypes());
}
}
@ -383,7 +383,7 @@ public class MetaDataMappingService extends AbstractComponent {
newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null);
if (existingMapper != null) {
// first, simulate
MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true);
MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes());
// if we have conflicts, throw an exception
if (mergeResult.hasConflicts()) {
throw new MergeMappingException(mergeResult.buildConflicts());
@ -438,7 +438,7 @@ public class MetaDataMappingService extends AbstractComponent {
if (existingMappers.containsKey(entry.getKey())) {
existingSource = existingMappers.get(entry.getKey()).mappingSource();
}
DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false);
DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false, request.updateAllTypes());
CompressedXContent updatedSource = mergedMapper.mappingSource();
if (existingSource != null) {

View File

@ -52,20 +52,15 @@ public final class FieldNameAnalyzer extends DelegatingAnalyzerWrapper {
return defaultAnalyzer;
}
/** NOTE: public so MapperAnalyzer can invoke: */
@Override
public Analyzer getWrappedAnalyzer(String fieldName) {
return getAnalyzer(fieldName);
}
private Analyzer getAnalyzer(String name) {
Analyzer analyzer = analyzers.get(name);
protected Analyzer getWrappedAnalyzer(String fieldName) {
Analyzer analyzer = analyzers.get(fieldName);
if (analyzer != null) {
return analyzer;
}
// Don't be lenient here and return the default analyzer
// Fields need to be explicitly added
throw new IllegalArgumentException("Field [" + name + "] has no associated analyzer");
throw new IllegalArgumentException("Field [" + fieldName + "] has no associated analyzer");
}
/**

View File

@ -109,9 +109,20 @@ public class FieldsVisitor extends StoredFieldVisitor {
public void postProcess(DocumentMapper documentMapper) {
for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
FieldMapper fieldMapper = documentMapper.mappers().indexName(entry.getKey()).mapper();
String indexName = entry.getKey();
FieldMapper fieldMapper = documentMapper.mappers().getMapper(indexName);
if (fieldMapper == null) {
continue;
// it's possible index name doesn't match field name (legacy feature)
for (FieldMapper mapper : documentMapper.mappers()) {
if (mapper.fieldType().names().indexName().equals(indexName)) {
fieldMapper = mapper;
break;
}
}
if (fieldMapper == null) {
// no index name or full name found, so skip
continue;
}
}
List<Object> fieldValues = entry.getValue();
for (int i = 0; i < fieldValues.size(); i++) {

View File

@ -22,32 +22,38 @@ package org.elasticsearch.index.mapper;
import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
*
*/
public final class DocumentFieldMappers implements Iterable<FieldMapper> {
private final FieldMappersLookup fieldMappers;
/** Full field name to mapper */
private final CopyOnWriteHashMap<String, FieldMapper> fieldMappers;
private final FieldNameAnalyzer indexAnalyzer;
private final FieldNameAnalyzer searchAnalyzer;
private final FieldNameAnalyzer searchQuoteAnalyzer;
public DocumentFieldMappers(AnalysisService analysisService) {
this(new FieldMappersLookup(), new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
this(new CopyOnWriteHashMap<String, FieldMapper>(),
new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
}
private DocumentFieldMappers(FieldMappersLookup fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {
private DocumentFieldMappers(CopyOnWriteHashMap<String, FieldMapper> fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {
this.fieldMappers = fieldMappers;
this.indexAnalyzer = indexAnalyzer;
this.searchAnalyzer = searchAnalyzer;
@ -55,7 +61,10 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
}
public DocumentFieldMappers copyAndAllAll(Collection<FieldMapper> newMappers) {
FieldMappersLookup fieldMappers = this.fieldMappers.copyAndAddAll(newMappers);
CopyOnWriteHashMap<String, FieldMapper> map = this.fieldMappers;
for (FieldMapper fieldMapper : newMappers) {
map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper);
}
FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
@Override
public Map.Entry<String, Analyzer> apply(FieldMapper input) {
@ -74,22 +83,7 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchQuoteAnalyzer());
}
}));
return new DocumentFieldMappers(fieldMappers, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);
}
/**
* Looks up a field by its index name.
*
* Overriding index name for a field is no longer possibly, and only supported for backcompat.
* This function first attempts to lookup the field by full name, and only when that fails,
* does a full scan of all field mappers, collecting those with this index name.
*
* This will be removed in 3.0, once backcompat for overriding index name is removed.
* @deprecated Use {@link #getMapper(String)}
*/
@Deprecated
public FieldMappers indexName(String indexName) {
return fieldMappers.indexName(indexName);
return new DocumentFieldMappers(map, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);
}
/** Returns the mapper for the given field */
@ -97,23 +91,29 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
return fieldMappers.get(field);
}
Collection<String> simpleMatchToIndexNames(String pattern) {
return fieldMappers.simpleMatchToIndexNames(pattern);
}
public Collection<String> simpleMatchToFullName(String pattern) {
return fieldMappers.simpleMatchToFullName(pattern);
}
/**
* Tries to find first based on fullName, then by indexName.
*/
FieldMappers smartName(String name) {
return fieldMappers.smartName(name);
Set<String> fields = Sets.newHashSet();
for (FieldMapper fieldMapper : this) {
if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
fields.add(fieldMapper.fieldType().names().fullName());
} else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
fields.add(fieldMapper.fieldType().names().fullName());
}
}
return fields;
}
public FieldMapper smartNameFieldMapper(String name) {
return fieldMappers.smartNameFieldMapper(name);
FieldMapper fieldMapper = getMapper(name);
if (fieldMapper != null) {
return fieldMapper;
}
for (FieldMapper otherFieldMapper : this) {
if (otherFieldMapper.fieldType().names().indexName().equals(name)) {
return otherFieldMapper;
}
}
return null;
}
/**
@ -145,6 +145,6 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
}
public Iterator<FieldMapper> iterator() {
return fieldMappers.iterator();
return fieldMappers.values().iterator();
}
}

View File

@ -23,7 +23,6 @@ import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
@ -32,7 +31,6 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.CompressedXContent;
@ -43,6 +41,7 @@ import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapping.SourceTransform;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
@ -96,28 +95,32 @@ public class DocumentMapper implements ToXContent {
private final Mapper.BuilderContext builderContext;
public Builder(String index, Settings indexSettings, RootObjectMapper.Builder builder) {
public Builder(String index, Settings indexSettings, RootObjectMapper.Builder builder, MapperService mapperService) {
this.index = index;
this.indexSettings = indexSettings;
this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
this.rootObjectMapper = builder.build(builderContext);
// TODO: find a cleaner way to handle existing root mappings and using their field type as the default.
// the vast majority of these root mappers only need the existing type for backwards compatibility, since
// the pre 2.0 field type settings could be modified
// UID first so it will be the first stored field to load (so will benefit from "fields: []" early termination
this.rootMappers.put(UidFieldMapper.class, new UidFieldMapper(indexSettings));
this.rootMappers.put(IdFieldMapper.class, new IdFieldMapper(indexSettings));
this.rootMappers.put(RoutingFieldMapper.class, new RoutingFieldMapper(indexSettings));
this.rootMappers.put(UidFieldMapper.class, new UidFieldMapper(indexSettings, mapperService.fullName(UidFieldMapper.NAME)));
this.rootMappers.put(IdFieldMapper.class, new IdFieldMapper(indexSettings, mapperService.fullName(IdFieldMapper.NAME)));
this.rootMappers.put(RoutingFieldMapper.class, new RoutingFieldMapper(indexSettings, mapperService.fullName(RoutingFieldMapper.NAME)));
// add default mappers, order is important (for example analyzer should come before the rest to set context.analyzer)
this.rootMappers.put(SizeFieldMapper.class, new SizeFieldMapper(indexSettings));
this.rootMappers.put(IndexFieldMapper.class, new IndexFieldMapper(indexSettings));
this.rootMappers.put(SizeFieldMapper.class, new SizeFieldMapper(indexSettings, mapperService.fullName(SizeFieldMapper.NAME)));
this.rootMappers.put(IndexFieldMapper.class, new IndexFieldMapper(indexSettings, mapperService.fullName(IndexFieldMapper.NAME)));
this.rootMappers.put(SourceFieldMapper.class, new SourceFieldMapper(indexSettings));
this.rootMappers.put(TypeFieldMapper.class, new TypeFieldMapper(indexSettings));
this.rootMappers.put(AllFieldMapper.class, new AllFieldMapper(indexSettings));
this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper(indexSettings));
this.rootMappers.put(TypeFieldMapper.class, new TypeFieldMapper(indexSettings, mapperService.fullName(TypeFieldMapper.NAME)));
this.rootMappers.put(AllFieldMapper.class, new AllFieldMapper(indexSettings, mapperService.fullName(AllFieldMapper.NAME)));
this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper(indexSettings, mapperService.fullName(TimestampFieldMapper.NAME)));
this.rootMappers.put(TTLFieldMapper.class, new TTLFieldMapper(indexSettings));
this.rootMappers.put(VersionFieldMapper.class, new VersionFieldMapper(indexSettings));
this.rootMappers.put(ParentFieldMapper.class, new ParentFieldMapper(indexSettings));
this.rootMappers.put(ParentFieldMapper.class, new ParentFieldMapper(indexSettings, mapperService.fullName(ParentFieldMapper.NAME)));
// _field_names last so that it can see all other fields
this.rootMappers.put(FieldNamesFieldMapper.class, new FieldNamesFieldMapper(indexSettings));
this.rootMappers.put(FieldNamesFieldMapper.class, new FieldNamesFieldMapper(indexSettings, mapperService.fullName(FieldNamesFieldMapper.NAME)));
}
public Builder meta(ImmutableMap<String, Object> meta) {
@ -393,87 +396,40 @@ public class DocumentMapper implements ToXContent {
return DocumentParser.transformSourceAsMap(mapping, sourceAsMap);
}
private void addFieldMappers(Collection<FieldMapper> fieldMappers) {
assert mappingLock.isWriteLockedByCurrentThread();
this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
mapperService.addFieldMappers(fieldMappers);
}
public boolean isParent(String type) {
return mapperService.getParentTypes().contains(type);
}
private void addObjectMappers(Collection<ObjectMapper> objectMappers) {
private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
assert mappingLock.isWriteLockedByCurrentThread();
MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
for (ObjectMapper objectMapper : objectMappers) {
builder.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNestedObjects = true;
}
// first ensure we don't have any incompatible new fields
mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, true);
// update mappers for this document type
MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
for (ObjectMapper objectMapper : objectMappers) {
builder.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNestedObjects = true;
}
this.objectMappers = builder.immutableMap();
mapperService.addObjectMappers(objectMappers);
}
private MergeResult newMergeContext(boolean simulate) {
return new MergeResult(simulate) {
final List<String> conflicts = new ArrayList<>();
final List<FieldMapper> newFieldMappers = new ArrayList<>();
final List<ObjectMapper> newObjectMappers = new ArrayList<>();
@Override
public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
assert simulate() == false;
newFieldMappers.addAll(fieldMappers);
}
@Override
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
assert simulate() == false;
newObjectMappers.addAll(objectMappers);
}
@Override
public Collection<FieldMapper> getNewFieldMappers() {
return newFieldMappers;
}
@Override
public Collection<ObjectMapper> getNewObjectMappers() {
return newObjectMappers;
}
@Override
public void addConflict(String mergeFailure) {
conflicts.add(mergeFailure);
}
@Override
public boolean hasConflicts() {
return conflicts.isEmpty() == false;
}
@Override
public String[] buildConflicts() {
return conflicts.toArray(Strings.EMPTY_ARRAY);
}
};
}
public MergeResult merge(Mapping mapping, boolean simulate) {
try (ReleasableLock lock = mappingWriteLock.acquire()) {
final MergeResult mergeResult = newMergeContext(simulate);
this.mapping.merge(mapping, mergeResult);
if (simulate == false) {
addFieldMappers(mergeResult.getNewFieldMappers());
addObjectMappers(mergeResult.getNewObjectMappers());
refreshSource();
}
return mergeResult;
this.objectMappers = builder.immutableMap();
this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
// finally update for the entire index
mapperService.addMappers(objectMappers, fieldMappers);
}
public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
try (ReleasableLock lock = mappingWriteLock.acquire()) {
final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
this.mapping.merge(mapping, mergeResult);
if (simulate == false) {
addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers());
refreshSource();
}
return mergeResult;
}
}
private void refreshSource() throws ElasticsearchGenerationException {

View File

@ -168,7 +168,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
}
public Mapper.TypeParser.ParserContext parserContext() {
return new Mapper.TypeParser.ParserContext(analysisService, similarityLookupService, typeParsers, indexVersionCreated);
return new Mapper.TypeParser.ParserContext(analysisService, similarityLookupService, mapperService, typeParsers, indexVersionCreated);
}
public DocumentMapper parse(String source) throws MapperParsingException {
@ -228,7 +228,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
Mapper.TypeParser.ParserContext parserContext = parserContext();
// parse RootObjectMapper
DocumentMapper.Builder docBuilder = doc(index.name(), indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext));
DocumentMapper.Builder docBuilder = doc(index.name(), indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService);
Iterator<Map.Entry<String, Object>> iterator = mapping.entrySet().iterator();
// parse DocumentMapper
while(iterator.hasNext()) {

View File

@ -33,6 +33,13 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper.Builder;
import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType;
import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
@ -438,6 +445,174 @@ class DocumentParser implements Closeable {
}
}
private static Mapper.Builder<?,?> createBuilderFromFieldType(final ParseContext context, MappedFieldType fieldType, String currentFieldName) {
Mapper.Builder builder = null;
if (fieldType instanceof StringFieldType) {
builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = MapperBuilders.stringField(currentFieldName);
}
} else if (fieldType instanceof DateFieldType) {
builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
if (builder == null) {
builder = MapperBuilders.dateField(currentFieldName);
}
} else if (fieldType.numericType() != null) {
switch (fieldType.numericType()) {
case LONG:
builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = MapperBuilders.longField(currentFieldName);
}
break;
case DOUBLE:
builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = MapperBuilders.doubleField(currentFieldName);
}
break;
case INT:
builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
if (builder == null) {
builder = MapperBuilders.integerField(currentFieldName);
}
break;
case FLOAT:
builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
if (builder == null) {
builder = MapperBuilders.floatField(currentFieldName);
}
break;
default:
throw new AssertionError("Unexpected numeric type " + fieldType.numericType());
}
}
return builder;
}
/**
 * Creates a mapper builder for a dynamically discovered field, based purely on the
 * value's token type (and, for strings, on date/numeric detection). Dynamic templates
 * always take precedence over the built-in default mappers.
 *
 * @throws IllegalStateException if no mapping can be derived for the given token
 */
private static Mapper.Builder<?,?> createBuilderFromDynamicValue(final ParseContext context, XContentParser.Token token, String currentFieldName) throws IOException {
    if (token == XContentParser.Token.VALUE_STRING) {
        // Consult dynamic templates before any detection. Doing the template lookup
        // first also handles things like attachment templates, where probing the text
        // (to see whether it is a date) would clear the binary value.
        Mapper.Builder templateBuilder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
        if (templateBuilder != null) {
            return templateBuilder;
        }
        if (context.root().dateDetection()) {
            String text = context.parser().text();
            // Cheap pre-check so that a plain number such as "1" (which date parsers
            // would happily accept) is not mistaken for a date.
            boolean mayBeDate = Strings.countOccurrencesOf(text, ":") > 1
                    || Strings.countOccurrencesOf(text, "-") > 1
                    || Strings.countOccurrencesOf(text, "/") > 1;
            if (mayBeDate) {
                for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
                    try {
                        dateTimeFormatter.parser().parseMillis(text);
                        Mapper.Builder dateBuilder = context.root().findTemplateBuilder(context, currentFieldName, "date");
                        if (dateBuilder == null) {
                            dateBuilder = MapperBuilders.dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
                        }
                        return dateBuilder;
                    } catch (Exception e) {
                        // this formatter could not parse the value; try the next one
                    }
                }
            }
        }
        if (context.root().numericDetection()) {
            String text = context.parser().text();
            try {
                Long.parseLong(text);
                Mapper.Builder longBuilder = context.root().findTemplateBuilder(context, currentFieldName, "long");
                return longBuilder == null ? MapperBuilders.longField(currentFieldName) : longBuilder;
            } catch (NumberFormatException e) {
                // not a long; fall through and try double
            }
            try {
                Double.parseDouble(text);
                Mapper.Builder doubleBuilder = context.root().findTemplateBuilder(context, currentFieldName, "double");
                return doubleBuilder == null ? MapperBuilders.doubleField(currentFieldName) : doubleBuilder;
            } catch (NumberFormatException e) {
                // not numeric at all; fall through to the plain string mapping
            }
        }
        Mapper.Builder stringBuilder = context.root().findTemplateBuilder(context, currentFieldName, "string");
        return stringBuilder == null ? MapperBuilders.stringField(currentFieldName) : stringBuilder;
    } else if (token == XContentParser.Token.VALUE_NUMBER) {
        XContentParser.NumberType numberType = context.parser().numberType();
        if (numberType == XContentParser.NumberType.INT) {
            // When the parser can only estimate the number type, widen int -> long so
            // that later, larger values for the same field still fit.
            String dynamicType = context.parser().estimatedNumberType() ? "long" : "integer";
            Mapper.Builder numberBuilder = context.root().findTemplateBuilder(context, currentFieldName, dynamicType);
            if (numberBuilder == null) {
                numberBuilder = "long".equals(dynamicType)
                        ? MapperBuilders.longField(currentFieldName)
                        : MapperBuilders.integerField(currentFieldName);
            }
            return numberBuilder;
        } else if (numberType == XContentParser.NumberType.LONG) {
            Mapper.Builder longBuilder = context.root().findTemplateBuilder(context, currentFieldName, "long");
            return longBuilder == null ? MapperBuilders.longField(currentFieldName) : longBuilder;
        } else if (numberType == XContentParser.NumberType.FLOAT) {
            // Same widening rationale as for ints: an estimated float becomes a double.
            String dynamicType = context.parser().estimatedNumberType() ? "double" : "float";
            Mapper.Builder numberBuilder = context.root().findTemplateBuilder(context, currentFieldName, dynamicType);
            if (numberBuilder == null) {
                numberBuilder = "double".equals(dynamicType)
                        ? MapperBuilders.doubleField(currentFieldName)
                        : MapperBuilders.floatField(currentFieldName);
            }
            return numberBuilder;
        } else if (numberType == XContentParser.NumberType.DOUBLE) {
            Mapper.Builder doubleBuilder = context.root().findTemplateBuilder(context, currentFieldName, "double");
            return doubleBuilder == null ? MapperBuilders.doubleField(currentFieldName) : doubleBuilder;
        }
        // any other number type falls through to the exception below
    } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
        Mapper.Builder boolBuilder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
        return boolBuilder == null ? MapperBuilders.booleanField(currentFieldName) : boolBuilder;
    } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
        Mapper.Builder binaryBuilder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
        return binaryBuilder == null ? MapperBuilders.binaryField(currentFieldName) : binaryBuilder;
    } else {
        // Unknown token: a dynamic template without an explicit type is the only way out.
        Mapper.Builder templateBuilder = context.root().findTemplateBuilder(context, currentFieldName, null);
        if (templateBuilder != null) {
            return templateBuilder;
        }
    }
    // TODO how do we identify dynamically that its a binary value?
    throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
}
private static ObjectMapper parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
ObjectMapper.Dynamic dynamic = parentMapper.dynamic();
if (dynamic == null) {
@ -449,140 +624,38 @@ class DocumentParser implements Closeable {
if (dynamic == ObjectMapper.Dynamic.FALSE) {
return null;
}
Mapper mapper = null;
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
if (token == XContentParser.Token.VALUE_STRING) {
boolean resolved = false;
// do a quick test to see if its fits a dynamic template, if so, use it.
// we need to do it here so we can handle things like attachment templates, where calling
// text (to see if its a date) causes the binary value to be cleared
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
if (builder != null) {
mapper = builder.build(builderContext);
resolved = true;
}
}
if (!resolved && context.root().dateDetection()) {
String text = context.parser().text();
// a safe check since "1" gets parsed as well
if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) {
for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
dateTimeFormatter.parser().parseMillis(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
if (builder == null) {
builder = MapperBuilders.dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
}
mapper = builder.build(builderContext);
resolved = true;
break;
} catch (Exception e) {
// failure to parse this, continue
}
}
}
}
if (!resolved && context.root().numericDetection()) {
String text = context.parser().text();
try {
Long.parseLong(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = MapperBuilders.longField(currentFieldName);
}
mapper = builder.build(builderContext);
resolved = true;
} catch (Exception e) {
// not a long number
}
if (!resolved) {
try {
Double.parseDouble(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = MapperBuilders.doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
resolved = true;
} catch (Exception e) {
// not a long number
}
}
}
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = MapperBuilders.stringField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
XContentParser.NumberType numberType = context.parser().numberType();
if (numberType == XContentParser.NumberType.INT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = MapperBuilders.longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
if (builder == null) {
builder = MapperBuilders.integerField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.LONG) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = MapperBuilders.longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (numberType == XContentParser.NumberType.FLOAT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = MapperBuilders.doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
if (builder == null) {
builder = MapperBuilders.floatField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.DOUBLE) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = MapperBuilders.doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
if (builder == null) {
builder = MapperBuilders.booleanField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
if (builder == null) {
builder = MapperBuilders.binaryField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().fullPathAsText(currentFieldName));
Mapper.Builder builder = null;
if (existingFieldType != null) {
// create a builder of the same type
builder = createBuilderFromFieldType(context, existingFieldType, currentFieldName);
if (builder != null) {
mapper = builder.build(builderContext);
} else {
// TODO how do we identify dynamically that its a binary value?
throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
// best-effort to not introduce a conflict
if (builder instanceof StringFieldMapper.Builder) {
StringFieldMapper.Builder stringBuilder = (StringFieldMapper.Builder) builder;
stringBuilder.store(existingFieldType.stored());
stringBuilder.indexOptions(existingFieldType.indexOptions());
stringBuilder.tokenized(existingFieldType.tokenized());
stringBuilder.omitNorms(existingFieldType.omitNorms());
stringBuilder.docValues(existingFieldType.hasDocValues());
stringBuilder.indexAnalyzer(existingFieldType.indexAnalyzer());
stringBuilder.searchAnalyzer(existingFieldType.searchAnalyzer());
} else if (builder instanceof NumberFieldMapper.Builder) {
NumberFieldMapper.Builder<?,?> numberBuilder = (NumberFieldMapper.Builder<?, ?>) builder;
numberBuilder.store(existingFieldType.stored());
numberBuilder.indexOptions(existingFieldType.indexOptions());
numberBuilder.tokenized(existingFieldType.tokenized());
numberBuilder.omitNorms(existingFieldType.omitNorms());
numberBuilder.docValues(existingFieldType.hasDocValues());
numberBuilder.precisionStep(existingFieldType.numericPrecisionStep());
}
}
}
if (builder == null) {
builder = createBuilderFromDynamicValue(context, token, currentFieldName);
}
Mapper mapper = builder.build(builderContext);
mapper = parseAndMergeUpdate(mapper, context);
@ -621,10 +694,9 @@ class DocumentParser implements Closeable {
/** Creates an copy of the current field with given field name and boost */
private static void parseCopy(String field, ParseContext context) throws IOException {
// TODO: this should not be indexName...
FieldMappers mappers = context.docMapper().mappers().indexName(field);
if (mappers != null && !mappers.isEmpty()) {
mappers.mapper().parse(context);
FieldMapper fieldMapper = context.docMapper().mappers().getMapper(field);
if (fieldMapper != null) {
fieldMapper.parse(context);
} else {
// The path of the dest field might be completely different from the current one so we need to reset it
context = context.overridePath(new ContentPath(0));

View File

@ -32,6 +32,15 @@ public interface FieldMapper extends Mapper {
MappedFieldType fieldType();
/** Returns a reference to the MappedFieldType for this mapper. */
MappedFieldTypeReference fieldTypeReference();
/**
* Updates the reference to this field's MappedFieldType.
* Implementations should assert equality of the underlying field type
*/
void setFieldTypeReference(MappedFieldTypeReference ref);
/**
* List of fields where this field should be copied to
*/

View File

@ -1,193 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.collect.Sets;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
import org.elasticsearch.common.regex.Regex;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.Set;
/**
 * An immutable lookup of {@link FieldMapper}s keyed by the mapper's full field name.
 * Lookups by index name are supported for backcompat only and fall back to a linear
 * scan when the index name differs from the full name.
 */
class FieldMappersLookup implements Iterable<FieldMapper> {

    /** Full field name to the mappers registered under that name. */
    private final CopyOnWriteHashMap<String, FieldMappers> mappers;

    /** Create a new empty instance. */
    public FieldMappersLookup() {
        mappers = new CopyOnWriteHashMap<>();
    }

    // Internal constructor used by copyAndAddAll to wrap an already-built map.
    private FieldMappersLookup(CopyOnWriteHashMap<String, FieldMappers> map) {
        mappers = map;
    }

    /**
     * Return a new instance that contains the union of this instance and the provided mappers.
     * A mapper whose full name is already present is appended to the existing
     * {@link FieldMappers} bucket; this instance is left unmodified.
     */
    public FieldMappersLookup copyAndAddAll(Collection<FieldMapper> newMappers) {
        CopyOnWriteHashMap<String, FieldMappers> map = this.mappers;
        for (FieldMapper mapper : newMappers) {
            String key = mapper.fieldType().names().fullName();
            FieldMappers mappers = map.get(key);
            if (mappers == null) {
                mappers = new FieldMappers(mapper);
            } else {
                mappers = mappers.concat(mapper);
            }
            map = map.copyAndPut(key, mappers);
        }
        return new FieldMappersLookup(map);
    }

    /**
     * Returns the field mappers based on the mapper index name.
     * NOTE: this only exists for backcompat support and if the index name
     * does not match its field name, this is a linear time operation
     * (a full scan over every registered mapper).
     * @deprecated Use {@link #get(String)}
     */
    @Deprecated
    public FieldMappers indexName(String indexName) {
        // fast path: index name equals full name, so the map lookup suffices
        FieldMappers fieldMappers = fullName(indexName);
        if (fieldMappers != null) {
            if (fieldMappers.mapper().fieldType().names().indexName().equals(indexName)) {
                return fieldMappers;
            }
        }
        // slow path: scan all mappers for a matching index name
        fieldMappers = new FieldMappers();
        for (FieldMapper mapper : this) {
            if (mapper.fieldType().names().indexName().equals(indexName)) {
                fieldMappers = fieldMappers.concat(mapper);
            }
        }
        if (fieldMappers.isEmpty()) {
            return null;
        }
        return fieldMappers;
    }

    /**
     * Returns the field mappers based on the mapper full name, or null if there are none.
     */
    public FieldMappers fullName(String fullName) {
        return mappers.get(fullName);
    }

    /**
     * Returns the single mapper for the given field, or null if the field is unknown.
     * @throws IllegalStateException if more than one mapper is registered under the name
     */
    public FieldMapper get(String field) {
        FieldMappers fieldMappers = mappers.get(field);
        if (fieldMappers == null) {
            return null;
        }
        if (fieldMappers.mappers().size() != 1) {
            throw new IllegalStateException("Mapper for field [" + field + "] should be unique");
        }
        return fieldMappers.mapper();
    }

    /**
     * Returns a list of the index names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToIndexNames(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (FieldMapper fieldMapper : this) {
            if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
                fields.add(fieldMapper.fieldType().names().indexName());
            } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
                fields.add(fieldMapper.fieldType().names().indexName());
            }
        }
        return fields;
    }

    /**
     * Returns a list of the full names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToFullName(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (FieldMapper fieldMapper : this) {
            if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
                fields.add(fieldMapper.fieldType().names().fullName());
            } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
                fields.add(fieldMapper.fieldType().names().fullName());
            }
        }
        return fields;
    }

    /**
     * Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}.
     */
    @Nullable
    FieldMappers smartName(String name) {
        FieldMappers fieldMappers = fullName(name);
        if (fieldMappers != null) {
            return fieldMappers;
        }
        return indexName(name);
    }

    /**
     * Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}
     * and return the first mapper for it (see {@link org.elasticsearch.index.mapper.FieldMappers#mapper()}).
     */
    @Nullable
    public FieldMapper smartNameFieldMapper(String name) {
        FieldMappers fieldMappers = smartName(name);
        if (fieldMappers == null) {
            return null;
        }
        return fieldMappers.mapper();
    }

    /**
     * Iterates over every mapper, flattening the per-name {@link FieldMappers} buckets.
     * NOTE(review): the advancement logic assumes each bucket in the map is non-empty;
     * an empty intermediate bucket would make next() fail — confirm buckets are never empty.
     */
    public Iterator<FieldMapper> iterator() {
        final Iterator<FieldMappers> fieldsItr = mappers.values().iterator();
        if (fieldsItr.hasNext() == false) {
            return Collections.emptyIterator();
        }
        return new Iterator<FieldMapper>() {
            // iterator over the mappers of the bucket currently being consumed
            Iterator<FieldMapper> fieldValuesItr = fieldsItr.next().iterator();
            @Override
            public boolean hasNext() {
                return fieldsItr.hasNext() || fieldValuesItr.hasNext();
            }
            @Override
            public FieldMapper next() {
                // advance to the next bucket once the current one is exhausted
                if (fieldValuesItr.hasNext() == false && fieldsItr.hasNext()) {
                    fieldValuesItr = fieldsItr.next().iterator();
                }
                return fieldValuesItr.next();
            }
            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove field mapper from lookup");
            }
        };
    }
}

View File

@ -0,0 +1,184 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.base.Function;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
import org.elasticsearch.common.regex.Regex;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
 * An immutable container for looking up {@link MappedFieldType}s by their full name
 * or index name. Field types are held behind shared {@link MappedFieldTypeReference}s
 * so that all mappers of the same field (across types) observe updates.
 */
class FieldTypeLookup implements Iterable<MappedFieldType> {

    /** Unwraps a reference into the field type it currently points at. */
    private static final Function<MappedFieldTypeReference, MappedFieldType> UNWRAPPER = new Function<MappedFieldTypeReference, MappedFieldType>() {
        @Override
        public MappedFieldType apply(MappedFieldTypeReference ref) {
            return ref.get();
        }
    };

    /** Full field name to field type */
    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;

    /** Index field name to field type */
    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> indexNameToFieldType;

    /** Create a new empty instance. */
    public FieldTypeLookup() {
        fullNameToFieldType = new CopyOnWriteHashMap<>();
        indexNameToFieldType = new CopyOnWriteHashMap<>();
    }

    // Internal constructor used by copyAndAddAll to wrap already-built maps.
    private FieldTypeLookup(CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName, CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName) {
        fullNameToFieldType = fullName;
        indexNameToFieldType = indexName;
    }

    /**
     * Return a new instance that contains the union of this instance and the field types
     * from the provided fields. If a field already exists, the field type will be updated
     * to use the new mappers field type.
     * @throws IllegalStateException if a new field would bridge two distinct existing
     *         references (its full name and index name already map to different fields)
     */
    public FieldTypeLookup copyAndAddAll(Collection<FieldMapper> newFieldMappers) {
        CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName = this.fullNameToFieldType;
        CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName = this.indexNameToFieldType;
        for (FieldMapper fieldMapper : newFieldMappers) {
            MappedFieldType fieldType = fieldMapper.fieldType();
            MappedFieldTypeReference fullNameRef = fullName.get(fieldType.names().fullName());
            MappedFieldTypeReference indexNameRef = indexName.get(fieldType.names().indexName());
            if (fullNameRef == null && indexNameRef == null) {
                // new field, just use the ref from this field mapper
                fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldTypeReference());
                indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldTypeReference());
            } else if (fullNameRef == null) {
                // this index name already exists, so copy over the reference
                fullName = fullName.copyAndPut(fieldType.names().fullName(), indexNameRef);
                indexNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
                fieldMapper.setFieldTypeReference(indexNameRef);
            } else if (indexNameRef == null) {
                // this full name already exists, so copy over the reference
                indexName = indexName.copyAndPut(fieldType.names().indexName(), fullNameRef);
                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
                fieldMapper.setFieldTypeReference(fullNameRef);
            } else if (fullNameRef == indexNameRef) {
                // the field already exists, so replace the reference in this mapper with the pre-existing one
                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
                fieldMapper.setFieldTypeReference(fullNameRef);
            } else {
                // this new field bridges between two existing field names (a full and index name), which we cannot support
                throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
            }
        }
        return new FieldTypeLookup(fullName, indexName);
    }

    /**
     * Checks if the given mappers' field types are compatible with existing field types.
     * If any are not compatible, an IllegalArgumentException is thrown.
     * If updateAllTypes is true, only basic compatibility is checked.
     */
    public void checkCompatibility(Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
        for (FieldMapper fieldMapper : newFieldMappers) {
            MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
            if (ref != null) {
                List<String> conflicts = new ArrayList<>();
                ref.get().checkTypeName(fieldMapper.fieldType(), conflicts);
                if (conflicts.isEmpty()) { // only check compat if they are the same type
                    // the check is strict when the field spans multiple types and the caller
                    // did not explicitly request updating all of them
                    boolean strict = ref.getNumAssociatedMappers() > 1 && updateAllTypes == false;
                    ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
                }
                if (conflicts.isEmpty() == false) {
                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types" + conflicts.toString());
                }
            }

            // field type for the index name must be compatible too
            // BUG FIX: look the index name up in indexNameToFieldType (previously the
            // full-name map was queried with an index name, so this check never matched)
            // and run the type-name check against indexNameRef ('ref' may be null here).
            MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
            if (indexNameRef != null) {
                List<String> conflicts = new ArrayList<>();
                indexNameRef.get().checkTypeName(fieldMapper.fieldType(), conflicts);
                if (conflicts.isEmpty()) { // only check compat if they are the same type
                    boolean strict = indexNameRef.getNumAssociatedMappers() > 1 && updateAllTypes == false;
                    indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
                }
                if (conflicts.isEmpty() == false) {
                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
                }
            }
        }
    }

    /** Returns the field type for the given full field name, or null if unknown. */
    public MappedFieldType get(String field) {
        MappedFieldTypeReference ref = fullNameToFieldType.get(field);
        if (ref == null) return null;
        return ref.get();
    }

    /** Returns the field type for the given index name, or null if unknown. */
    public MappedFieldType getByIndexName(String field) {
        MappedFieldTypeReference ref = indexNameToFieldType.get(field);
        if (ref == null) return null;
        return ref.get();
    }

    /**
     * Returns a list of the index names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToIndexNames(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (MappedFieldType fieldType : this) {
            if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
                fields.add(fieldType.names().indexName());
            } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
                fields.add(fieldType.names().indexName());
            }
        }
        return fields;
    }

    /**
     * Returns a list of the full names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToFullName(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (MappedFieldType fieldType : this) {
            if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
                fields.add(fieldType.names().fullName());
            } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
                fields.add(fieldType.names().fullName());
            }
        }
        return fields;
    }

    /** Iterates over all field types, keyed by full name (each type is visited once per full name). */
    public Iterator<MappedFieldType> iterator() {
        return Iterators.transform(fullNameToFieldType.values().iterator(), UNWRAPPER);
    }
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper;
import com.google.common.base.Strings;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
@ -38,7 +37,6 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
@ -52,7 +50,7 @@ import java.util.Objects;
/**
* This defines the core properties and functions to operate on a field.
*/
public class MappedFieldType extends FieldType {
public abstract class MappedFieldType extends FieldType {
public static class Names {
@ -196,12 +194,17 @@ public class MappedFieldType extends FieldType {
this.nullValueAsString = ref.nullValueAsString();
}
public MappedFieldType() {}
public MappedFieldType clone() {
return new MappedFieldType(this);
public MappedFieldType() {
setTokenized(true);
setStored(false);
setStoreTermVectors(false);
setOmitNorms(false);
setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
setBoost(1.0f);
}
public abstract MappedFieldType clone();
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
@ -226,10 +229,24 @@ public class MappedFieldType extends FieldType {
// norelease: we need to override freeze() and add safety checks that all settings are actually set
/** Returns the name of this type, as would be specified in mapping properties */
public abstract String typeName();
/** Checks this type is the same type as other. Adds a conflict if they are different. */
public final void checkTypeName(MappedFieldType other, List<String> conflicts) {
if (typeName().equals(other.typeName()) == false) {
conflicts.add("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
} else if (getClass() != other.getClass()) {
throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName());
}
}
/**
* Checks for any conflicts between this field type and other.
* If strict is true, all properties must be equal.
* Otherwise, only properties which must never change in an index are checked.
*/
public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
public void checkCompatibility(MappedFieldType other, List<String> conflicts, boolean strict) {
boolean indexed = indexOptions() != IndexOptions.NONE;
boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
// TODO: should be validating if index options go "up" (but "down" is ok)
@ -240,7 +257,7 @@ public class MappedFieldType extends FieldType {
conflicts.add("mapper [" + names().fullName() + "] has different store values");
}
if (hasDocValues() == false && other.hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitely set
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
// when the doc_values field data format is configured
conflicts.add("mapper [" + names().fullName() + "] has different doc_values values");
}
@ -277,10 +294,30 @@ public class MappedFieldType extends FieldType {
if (!names().equals(other.names())) {
conflicts.add("mapper [" + names().fullName() + "] has different index_name");
}
if (Objects.equals(similarity(), other.similarity()) == false) {
conflicts.add("mapper [" + names().fullName() + "] has different similarity");
}
if (strict) {
if (omitNorms() != other.omitNorms()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types.");
}
if (boost() != other.boost()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types.");
}
if (normsLoading() != other.normsLoading()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms].loading across all types.");
}
if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types.");
}
if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types.");
}
if (Objects.equals(nullValue(), other.nullValue()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types.");
}
}
}
public boolean isNumeric() {

View File

@ -18,10 +18,33 @@
*/
package org.elasticsearch.index.mapper;
public class MappedFieldTypeTests extends FieldTypeTestCase {
/**
* A container for a {@link MappedFieldType} which can be updated and is reference counted.
*/
public class MappedFieldTypeReference {
private MappedFieldType fieldType; // the current field type this reference points to
private int numAssociatedMappers;
@Override
public MappedFieldType createDefaultFieldType() {
return new MappedFieldType();
public MappedFieldTypeReference(MappedFieldType fieldType) {
fieldType.freeze(); // ensure frozen
this.fieldType = fieldType;
this.numAssociatedMappers = 1;
}
public MappedFieldType get() {
return fieldType;
}
public void set(MappedFieldType fieldType) {
fieldType.freeze(); // ensure frozen
this.fieldType = fieldType;
}
public int getNumAssociatedMappers() {
return numAssociatedMappers;
}
public void incrementAssociatedMappers() {
++numAssociatedMappers;
}
}

View File

@ -86,14 +86,18 @@ public interface Mapper extends ToXContent, Iterable<Mapper> {
private final SimilarityLookupService similarityLookupService;
private final MapperService mapperService;
private final ImmutableMap<String, TypeParser> typeParsers;
private final Version indexVersionCreated;
public ParserContext(AnalysisService analysisService, SimilarityLookupService similarityLookupService,
MapperService mapperService,
ImmutableMap<String, TypeParser> typeParsers, Version indexVersionCreated) {
this.analysisService = analysisService;
this.similarityLookupService = similarityLookupService;
this.mapperService = mapperService;
this.typeParsers = typeParsers;
this.indexVersionCreated = indexVersionCreated;
}
@ -106,6 +110,10 @@ public interface Mapper extends ToXContent, Iterable<Mapper> {
return similarityLookupService;
}
public MapperService mapperService() {
return mapperService;
}
public TypeParser typeParser(String type) {
return typeParsers.get(Strings.toUnderscoreCase(type));
}

View File

@ -1,50 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.elasticsearch.index.analysis.FieldNameAnalyzer;
/** Hacky analyzer to dispatch per-thread based on the type of the current document being indexed, to look up the per-field Analyzer. Once
* mappings are moved to the index level we can remove this. */
public class MapperAnalyzer extends DelegatingAnalyzerWrapper {
private final MapperService mapperService;
/** Which type this thread is currently indexing. */
private final ThreadLocal<String> threadTypes = new ThreadLocal<>();
public MapperAnalyzer(MapperService mapperService) {
super(Analyzer.PER_FIELD_REUSE_STRATEGY);
this.mapperService = mapperService;
}
/** Any thread that is about to use this analyzer for indexing must first set the type by calling this. */
public void setType(String type) {
threadTypes.set(type);
}
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
// First get the FieldNameAnalyzer from the type, then ask it for the right analyzer for this field, or the default index analyzer:
return ((FieldNameAnalyzer) mapperService.documentMapper(threadTypes.get()).mappers().indexAnalyzer()).getWrappedAnalyzer(fieldName);
}
}

View File

@ -28,69 +28,12 @@ import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
/**
*
*/
public final class MapperBuilders {
private MapperBuilders() {
private MapperBuilders() {}
}
public static DocumentMapper.Builder doc(String index, Settings settings, RootObjectMapper.Builder objectBuilder) {
return new DocumentMapper.Builder(index, settings, objectBuilder);
}
public static SourceFieldMapper.Builder source() {
return new SourceFieldMapper.Builder();
}
public static IdFieldMapper.Builder id() {
return new IdFieldMapper.Builder();
}
public static RoutingFieldMapper.Builder routing() {
return new RoutingFieldMapper.Builder();
}
public static UidFieldMapper.Builder uid() {
return new UidFieldMapper.Builder();
}
public static SizeFieldMapper.Builder size() {
return new SizeFieldMapper.Builder();
}
public static VersionFieldMapper.Builder version() {
return new VersionFieldMapper.Builder();
}
public static TypeFieldMapper.Builder type() {
return new TypeFieldMapper.Builder();
}
public static FieldNamesFieldMapper.Builder fieldNames() {
return new FieldNamesFieldMapper.Builder();
}
public static IndexFieldMapper.Builder index() {
return new IndexFieldMapper.Builder();
}
public static TimestampFieldMapper.Builder timestamp() {
return new TimestampFieldMapper.Builder();
}
public static TTLFieldMapper.Builder ttl() {
return new TTLFieldMapper.Builder();
}
public static ParentFieldMapper.Builder parent() {
return new ParentFieldMapper.Builder();
}
public static AllFieldMapper.Builder all() {
return new AllFieldMapper.Builder();
public static DocumentMapper.Builder doc(String index, Settings settings, RootObjectMapper.Builder objectBuilder, MapperService mapperService) {
return new DocumentMapper.Builder(index, settings, objectBuilder, mapperService);
}
public static RootObjectMapper.Builder rootObject(String name) {

View File

@ -21,7 +21,10 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectHashSet;
import com.google.common.base.Predicate;
import com.google.common.collect.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
@ -36,7 +39,6 @@ import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
@ -70,6 +72,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
@ -83,6 +86,23 @@ public class MapperService extends AbstractIndexComponent {
"_uid", "_id", "_type", "_all", "_parent", "_routing", "_index",
"_size", "_timestamp", "_ttl"
);
private static final Function<MappedFieldType, Analyzer> INDEX_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
public Analyzer apply(MappedFieldType fieldType) {
return fieldType.indexAnalyzer();
}
};
private static final Function<MappedFieldType, Analyzer> SEARCH_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
public Analyzer apply(MappedFieldType fieldType) {
return fieldType.searchAnalyzer();
}
};
private static final Function<MappedFieldType, Analyzer> SEARCH_QUOTE_ANALYZER_EXTRACTOR = new Function<MappedFieldType, Analyzer>() {
public Analyzer apply(MappedFieldType fieldType) {
return fieldType.searchQuoteAnalyzer();
}
};
private final AnalysisService analysisService;
private final IndexFieldDataService fieldDataService;
@ -102,14 +122,15 @@ public class MapperService extends AbstractIndexComponent {
final ReentrantReadWriteLock mappingLock = new ReentrantReadWriteLock();
private final ReleasableLock mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
private volatile FieldMappersLookup fieldMappers;
private volatile ImmutableOpenMap<String, ObjectMappers> fullPathObjectMappers = ImmutableOpenMap.of();
private volatile FieldTypeLookup fieldTypes;
private volatile ImmutableOpenMap<String, ObjectMapper> fullPathObjectMappers = ImmutableOpenMap.of();
private boolean hasNested = false; // updated dynamically to true when a nested object is added
private final DocumentMapperParser documentParser;
private final SmartIndexNameSearchAnalyzer searchAnalyzer;
private final SmartIndexNameSearchQuoteAnalyzer searchQuoteAnalyzer;
private final MapperAnalyzerWrapper indexAnalyzer;
private final MapperAnalyzerWrapper searchAnalyzer;
private final MapperAnalyzerWrapper searchQuoteAnalyzer;
private final List<DocumentTypeListener> typeListeners = new CopyOnWriteArrayList<>();
@ -124,10 +145,11 @@ public class MapperService extends AbstractIndexComponent {
super(index, indexSettings);
this.analysisService = analysisService;
this.fieldDataService = fieldDataService;
this.fieldMappers = new FieldMappersLookup();
this.fieldTypes = new FieldTypeLookup();
this.documentParser = new DocumentMapperParser(index, indexSettings, this, analysisService, similarityLookupService, scriptService);
this.searchAnalyzer = new SmartIndexNameSearchAnalyzer(analysisService.defaultSearchAnalyzer());
this.searchQuoteAnalyzer = new SmartIndexNameSearchQuoteAnalyzer(analysisService.defaultSearchQuoteAnalyzer());
this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), INDEX_ANALYZER_EXTRACTOR);
this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), SEARCH_ANALYZER_EXTRACTOR);
this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), SEARCH_QUOTE_ANALYZER_EXTRACTOR);
this.dynamic = indexSettings.getAsBoolean("index.mapper.dynamic", true);
defaultPercolatorMappingSource = "{\n" +
@ -214,7 +236,7 @@ public class MapperService extends AbstractIndexComponent {
typeListeners.remove(listener);
}
public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault) {
public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault, boolean updateAllTypes) {
if (DEFAULT_MAPPING.equals(type)) {
// verify we can parse it
DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource);
@ -230,13 +252,13 @@ public class MapperService extends AbstractIndexComponent {
}
return mapper;
} else {
return merge(parse(type, mappingSource, applyDefault));
return merge(parse(type, mappingSource, applyDefault), updateAllTypes);
}
}
// never expose this to the outside world, we need to reparse the doc mapper so we get fresh
// instances of field mappers to properly remove existing doc mapper
private DocumentMapper merge(DocumentMapper mapper) {
private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) {
try (ReleasableLock lock = mappingWriteLock.acquire()) {
if (mapper.type().length() == 0) {
throw new InvalidTypeNameException("mapping type name is empty");
@ -262,7 +284,7 @@ public class MapperService extends AbstractIndexComponent {
DocumentMapper oldMapper = mappers.get(mapper.type());
if (oldMapper != null) {
MergeResult result = oldMapper.merge(mapper.mapping(), false);
MergeResult result = oldMapper.merge(mapper.mapping(), false, updateAllTypes);
if (result.hasConflicts()) {
// TODO: What should we do???
if (logger.isDebugEnabled()) {
@ -270,19 +292,18 @@ public class MapperService extends AbstractIndexComponent {
}
}
fieldDataService.onMappingUpdate();
assert assertSerialization(oldMapper);
return oldMapper;
} else {
List<ObjectMapper> newObjectMappers = new ArrayList<>();
List<FieldMapper> newFieldMappers = new ArrayList<>();
for (RootMapper rootMapper : mapper.mapping().rootMappers) {
if (rootMapper instanceof FieldMapper) {
newFieldMappers.add((FieldMapper)rootMapper);
newFieldMappers.add((FieldMapper) rootMapper);
}
}
MapperUtils.collect(mapper.mapping().root, newObjectMappers, newFieldMappers);
addFieldMappers(newFieldMappers);
addObjectMappers(newObjectMappers);
checkNewMappersCompatibility(newObjectMappers, newFieldMappers, updateAllTypes);
addMappers(newObjectMappers, newFieldMappers);
for (DocumentTypeListener typeListener : typeListeners) {
typeListener.beforeCreate(mapper);
@ -313,28 +334,33 @@ public class MapperService extends AbstractIndexComponent {
return true;
}
protected void addObjectMappers(Collection<ObjectMapper> objectMappers) {
protected void checkNewMappersCompatibility(Collection<ObjectMapper> newObjectMappers, Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
assert mappingLock.isWriteLockedByCurrentThread();
ImmutableOpenMap.Builder<String, ObjectMappers> fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers);
for (ObjectMapper objectMapper : objectMappers) {
ObjectMappers mappers = fullPathObjectMappers.get(objectMapper.fullPath());
if (mappers == null) {
mappers = new ObjectMappers(objectMapper);
} else {
mappers = mappers.concat(objectMapper);
for (ObjectMapper newObjectMapper : newObjectMappers) {
ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath());
if (existingObjectMapper != null) {
MergeResult result = new MergeResult(true, updateAllTypes);
existingObjectMapper.merge(newObjectMapper, result);
if (result.hasConflicts()) {
throw new IllegalArgumentException("Mapper for [" + newObjectMapper.fullPath() + "] conflicts with existing mapping in other types" +
Arrays.toString(result.buildConflicts()));
}
}
fullPathObjectMappers.put(objectMapper.fullPath(), mappers);
// update the hasNested flag
}
fieldTypes.checkCompatibility(newFieldMappers, updateAllTypes);
}
protected void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
assert mappingLock.isWriteLockedByCurrentThread();
ImmutableOpenMap.Builder<String, ObjectMapper> fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers);
for (ObjectMapper objectMapper : objectMappers) {
fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
if (objectMapper.nested().isNested()) {
hasNested = true;
}
}
this.fullPathObjectMappers = fullPathObjectMappers.build();
}
protected void addFieldMappers(Collection<FieldMapper> fieldMappers) {
assert mappingLock.isWriteLockedByCurrentThread();
this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers);
this.fieldTypes = this.fieldTypes.copyAndAddAll(fieldMappers);
}
public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {
@ -479,11 +505,7 @@ public class MapperService extends AbstractIndexComponent {
* If multiple types have fields with the same index name, the first is returned.
*/
public MappedFieldType indexName(String indexName) {
FieldMappers mappers = fieldMappers.indexName(indexName);
if (mappers == null) {
return null;
}
return mappers.mapper().fieldType();
return fieldTypes.getByIndexName(indexName);
}
/**
@ -492,11 +514,7 @@ public class MapperService extends AbstractIndexComponent {
* If multiple types have fields with the same full name, the first is returned.
*/
public MappedFieldType fullName(String fullName) {
FieldMappers mappers = fieldMappers.fullName(fullName);
if (mappers == null) {
return null;
}
return mappers.mapper().fieldType();
return fieldTypes.get(fullName);
}
/**
@ -504,52 +522,21 @@ public class MapperService extends AbstractIndexComponent {
* then the fields will be returned with a type prefix.
*/
public Collection<String> simpleMatchToIndexNames(String pattern) {
return simpleMatchToIndexNames(pattern, null);
}
/**
* Returns all the fields that match the given pattern, with an optional narrowing
* based on a list of types.
*/
public Collection<String> simpleMatchToIndexNames(String pattern, @Nullable String[] types) {
if (Regex.isSimpleMatchPattern(pattern) == false) {
// no wildcards
return ImmutableList.of(pattern);
}
if (MetaData.isAllTypes(types)) {
return fieldMappers.simpleMatchToIndexNames(pattern);
}
List<String> fields = Lists.newArrayList();
for (String type : types) {
DocumentMapper possibleDocMapper = mappers.get(type);
if (possibleDocMapper != null) {
for (String indexName : possibleDocMapper.mappers().simpleMatchToIndexNames(pattern)) {
fields.add(indexName);
}
}
}
return fields;
return fieldTypes.simpleMatchToIndexNames(pattern);
}
// TODO: remove this since the underlying index names are now the same across all types
public Collection<String> simpleMatchToIndexNames(String pattern, @Nullable String[] types) {
return simpleMatchToIndexNames(pattern);
}
// TODO: remove types param, since the object mapper must be the same across all types
public ObjectMapper getObjectMapper(String name, @Nullable String[] types) {
if (types == null || types.length == 0 || types.length == 1 && types[0].equals("_all")) {
ObjectMappers mappers = fullPathObjectMappers.get(name);
if (mappers != null) {
return mappers.mapper();
}
return null;
}
for (String type : types) {
DocumentMapper possibleDocMapper = mappers.get(type);
if (possibleDocMapper != null) {
ObjectMapper mapper = possibleDocMapper.objectMappers().get(name);
if (mapper != null) {
return mapper;
}
}
}
return null;
return fullPathObjectMappers.get(name);
}
public MappedFieldType smartNameFieldType(String smartName) {
@ -560,22 +547,9 @@ public class MapperService extends AbstractIndexComponent {
return indexName(smartName);
}
// TODO: remove this since the underlying index names are now the same across all types
public MappedFieldType smartNameFieldType(String smartName, @Nullable String[] types) {
if (types == null || types.length == 0 || types.length == 1 && types[0].equals("_all")) {
return smartNameFieldType(smartName);
}
for (String type : types) {
DocumentMapper documentMapper = mappers.get(type);
// we found a mapper
if (documentMapper != null) {
// see if we find a field for it
FieldMappers mappers = documentMapper.mappers().smartName(smartName);
if (mappers != null) {
return mappers.mapper().fieldType();
}
}
}
return null;
return smartNameFieldType(smartName);
}
/**
@ -604,6 +578,10 @@ public class MapperService extends AbstractIndexComponent {
return fieldType;
}
public Analyzer indexAnalyzer() {
return this.indexAnalyzer;
}
public Analyzer searchAnalyzer() {
return this.searchAnalyzer;
}
@ -622,18 +600,14 @@ public class MapperService extends AbstractIndexComponent {
} else {
do {
String objectPath = fieldName.substring(0, indexOf);
ObjectMappers objectMappers = fullPathObjectMappers.get(objectPath);
if (objectMappers == null) {
ObjectMapper objectMapper = fullPathObjectMappers.get(objectPath);
if (objectMapper == null) {
indexOf = objectPath.lastIndexOf('.');
continue;
}
if (objectMappers.hasNested()) {
for (ObjectMapper objectMapper : objectMappers) {
if (objectMapper.nested().isNested()) {
return objectMapper;
}
}
if (objectMapper.nested().isNested()) {
return objectMapper;
}
indexOf = objectPath.lastIndexOf('.');
@ -654,39 +628,26 @@ public class MapperService extends AbstractIndexComponent {
return META_FIELDS.contains(fieldName);
}
final class SmartIndexNameSearchAnalyzer extends DelegatingAnalyzerWrapper {
/** An analyzer wrapper that can lookup fields within the index mappings */
final class MapperAnalyzerWrapper extends DelegatingAnalyzerWrapper {
private final Analyzer defaultAnalyzer;
private final Function<MappedFieldType, Analyzer> extractAnalyzer;
SmartIndexNameSearchAnalyzer(Analyzer defaultAnalyzer) {
MapperAnalyzerWrapper(Analyzer defaultAnalyzer, Function<MappedFieldType, Analyzer> extractAnalyzer) {
super(Analyzer.PER_FIELD_REUSE_STRATEGY);
this.defaultAnalyzer = defaultAnalyzer;
this.extractAnalyzer = extractAnalyzer;
}
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
MappedFieldType fieldType = smartNameFieldType(fieldName);
if (fieldType != null && fieldType.searchAnalyzer() != null) {
return fieldType.searchAnalyzer();
}
return defaultAnalyzer;
}
}
final class SmartIndexNameSearchQuoteAnalyzer extends DelegatingAnalyzerWrapper {
private final Analyzer defaultAnalyzer;
SmartIndexNameSearchQuoteAnalyzer(Analyzer defaultAnalyzer) {
super(Analyzer.PER_FIELD_REUSE_STRATEGY);
this.defaultAnalyzer = defaultAnalyzer;
}
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
MappedFieldType fieldType = smartNameFieldType(fieldName);
if (fieldType != null && fieldType.searchQuoteAnalyzer() != null) {
return fieldType.searchQuoteAnalyzer();
if (fieldType != null) {
Analyzer analyzer = extractAnalyzer.apply(fieldType);
if (analyzer != null) {
return analyzer;
}
}
return defaultAnalyzer;
}

View File

@ -19,28 +19,16 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.IOException;
import java.util.Collection;
public enum MapperUtils {
;
private static MergeResult newStrictMergeResult() {
return new MergeResult(false) {
@Override
public boolean hasConflicts() {
return false;
}
@Override
public String[] buildConflicts() {
return Strings.EMPTY_ARRAY;
}
return new MergeResult(false, false) {
@Override
public void addFieldMappers(Collection<FieldMapper> fieldMappers) {

View File

@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.util.ArrayList;
@ -26,29 +27,55 @@ import java.util.Collection;
import java.util.List;
/** A container for tracking results of a mapping merge. */
public abstract class MergeResult {
public class MergeResult {
private final boolean simulate;
private final boolean updateAllTypes;
public MergeResult(boolean simulate) {
private final List<String> conflicts = new ArrayList<>();
private final List<FieldMapper> newFieldMappers = new ArrayList<>();
private final List<ObjectMapper> newObjectMappers = new ArrayList<>();
public MergeResult(boolean simulate, boolean updateAllTypes) {
this.simulate = simulate;
this.updateAllTypes = updateAllTypes;
}
public abstract void addFieldMappers(Collection<FieldMapper> fieldMappers);
public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
assert simulate() == false;
newFieldMappers.addAll(fieldMappers);
}
public abstract void addObjectMappers(Collection<ObjectMapper> objectMappers);
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
assert simulate() == false;
newObjectMappers.addAll(objectMappers);
}
public abstract Collection<FieldMapper> getNewFieldMappers();
public Collection<FieldMapper> getNewFieldMappers() {
return newFieldMappers;
}
public abstract Collection<ObjectMapper> getNewObjectMappers();
public Collection<ObjectMapper> getNewObjectMappers() {
return newObjectMappers;
}
public boolean simulate() {
return simulate;
}
public abstract void addConflict(String mergeFailure);
public boolean updateAllTypes() {
return updateAllTypes;
}
public abstract boolean hasConflicts();
public void addConflict(String mergeFailure) {
conflicts.add(mergeFailure);
}
public abstract String[] buildConflicts();
public boolean hasConflicts() {
return conflicts.isEmpty() == false;
}
public String[] buildConflicts() {
return conflicts.toArray(Strings.EMPTY_ARRAY);
}
}

View File

@ -280,6 +280,11 @@ public abstract class ParseContext {
return in.analysisService();
}
@Override
public MapperService mapperService() {
return in.mapperService();
}
@Override
public String id() {
return in.id();
@ -513,6 +518,11 @@ public abstract class ParseContext {
return docMapperParser.analysisService;
}
@Override
public MapperService mapperService() {
return docMapperParser.mapperService;
}
@Override
public String id() {
return id;
@ -701,6 +711,8 @@ public abstract class ParseContext {
public abstract AnalysisService analysisService();
public abstract MapperService mapperService();
public abstract String id();
public abstract void ignoredValue(String indexName, String value);

View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import org.apache.lucene.document.Field;
@ -39,6 +38,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldTypeReference;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
@ -63,18 +63,6 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.DOC_VALUES;
public abstract class AbstractFieldMapper implements FieldMapper {
public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new MappedFieldType();
static {
FIELD_TYPE.setTokenized(true);
FIELD_TYPE.setStored(false);
FIELD_TYPE.setStoreTermVectors(false);
FIELD_TYPE.setOmitNorms(false);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
FIELD_TYPE.setBoost(Defaults.BOOST);
FIELD_TYPE.freeze();
}
public static final float BOOST = 1.0f;
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
}
@ -133,7 +121,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
public T storeTermVectors(boolean termVectors) {
if (termVectors) {
if (termVectors != this.fieldType.storeTermVectors()) {
this.fieldType.setStoreTermVectors(termVectors);
} // don't set it to false, it is default and might be flipped by a more specific option
return builder;
@ -268,7 +256,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
}
protected MappedFieldType fieldType;
protected MappedFieldTypeReference fieldTypeRef;
protected final boolean hasDefaultDocValues;
protected Settings customFieldDataSettings;
protected final MultiFields multiFields;
@ -302,14 +290,16 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
hasDefaultDocValues = docValues == null;
this.fieldType = fieldType.clone();
this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // must init first so defaultDocValues() can be called
fieldType = fieldType.clone();
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
this.fieldType().setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.fieldType().setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
}
this.fieldType().setHasDocValues(docValues == null ? defaultDocValues() : docValues);
this.fieldType().setFieldDataType(fieldDataType);
this.fieldType().freeze();
fieldType.setHasDocValues(docValues == null ? defaultDocValues() : docValues);
fieldType.setFieldDataType(fieldDataType);
fieldType.freeze();
this.fieldTypeRef.set(fieldType); // now reset ref once extra settings have been initialized
this.multiFields = multiFields;
this.copyTo = copyTo;
@ -335,7 +325,21 @@ public abstract class AbstractFieldMapper implements FieldMapper {
@Override
public MappedFieldType fieldType() {
return fieldType;
return fieldTypeRef.get();
}
@Override
public MappedFieldTypeReference fieldTypeReference() {
return fieldTypeRef;
}
@Override
public void setFieldTypeReference(MappedFieldTypeReference ref) {
if (ref.get().equals(fieldType()) == false) {
throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
}
ref.incrementAssociatedMappers();
this.fieldTypeRef = ref;
}
@Override
@ -393,7 +397,16 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
List<String> subConflicts = new ArrayList<>(); // TODO: just expose list from MergeResult?
fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts);
fieldType().checkTypeName(fieldMergeWith.fieldType(), subConflicts);
if (subConflicts.isEmpty() == false) {
// return early if field types don't match
assert subConflicts.size() == 1;
mergeResult.addConflict(subConflicts.get(0));
return;
}
boolean strict = this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false;
fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts, strict);
for (String conflict : subConflicts) {
mergeResult.addConflict(conflict);
}
@ -401,13 +414,10 @@ public abstract class AbstractFieldMapper implements FieldMapper {
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
// apply changeable values
this.fieldType = fieldMergeWith.fieldType().clone();
this.fieldType().freeze();
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
}
}
MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
fieldType.freeze();
fieldTypeRef.set(fieldType);
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.copyTo = fieldMergeWith.copyTo;
}
}
@ -468,7 +478,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
TreeMap<String, Object> orderedFielddataSettings = new TreeMap<>();
if (customFieldDataSettings != null) {
if (hasCustomFieldDataSettings()) {
orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap());
builder.field("fielddata", orderedFielddataSettings);
} else if (includeDefaults) {
@ -548,6 +558,10 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
}
protected boolean hasCustomFieldDataSettings() {
return customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;
}
protected abstract String contentType();
@Override

View File

@ -109,9 +109,7 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
static final class BinaryFieldType extends MappedFieldType {
private boolean tryUncompressing = false;
public BinaryFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public BinaryFieldType() {}
protected BinaryFieldType(BinaryFieldType ref) {
super(ref);
@ -135,6 +133,11 @@ public class BinaryFieldMapper extends AbstractFieldMapper {
return Objects.hash(super.hashCode(), tryUncompressing);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
public boolean tryUncompressing() {
return tryUncompressing;
}

View File

@ -118,9 +118,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
public static final class BooleanFieldType extends MappedFieldType {
public BooleanFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public BooleanFieldType() {}
protected BooleanFieldType(BooleanFieldType ref) {
super(ref);
@ -131,6 +129,11 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
return new BooleanFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Boolean nullValue() {
return (Boolean)super.nullValue();

View File

@ -121,7 +121,9 @@ public class ByteFieldMapper extends NumberFieldMapper {
}
static final class ByteFieldType extends NumberFieldType {
public ByteFieldType() {}
public ByteFieldType() {
super(NumericType.INT);
}
protected ByteFieldType(ByteFieldType ref) {
super(ref);
@ -132,6 +134,11 @@ public class ByteFieldMapper extends NumberFieldMapper {
return new ByteFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Byte nullValue() {
return (Byte)super.nullValue();

View File

@ -226,9 +226,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;
public CompletionFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public CompletionFieldType() {}
protected CompletionFieldType(CompletionFieldType ref) {
super(ref);
@ -243,8 +241,13 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
super.checkCompatibility(fieldType, conflicts, strict);
CompletionFieldType other = (CompletionFieldType)fieldType;
if (analyzingSuggestLookupProvider.hasPayloads() != other.analyzingSuggestLookupProvider.hasPayloads()) {
conflicts.add("mapper [" + names().fullName() + "] has different payload values");

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
@ -221,7 +222,9 @@ public class DateFieldMapper extends NumberFieldMapper {
protected TimeUnit timeUnit = Defaults.TIME_UNIT;
protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter);
public DateFieldType() {}
public DateFieldType() {
super(NumericType.LONG);
}
protected DateFieldType(DateFieldType ref) {
super(ref);
@ -239,6 +242,7 @@ public class DateFieldMapper extends NumberFieldMapper {
if (!super.equals(o)) return false;
DateFieldType that = (DateFieldType) o;
return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) &&
Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) &&
Objects.equals(timeUnit, that.timeUnit);
}
@ -247,6 +251,28 @@ public class DateFieldMapper extends NumberFieldMapper {
return Objects.hash(super.hashCode(), dateTimeFormatter.format(), timeUnit);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
super.checkCompatibility(fieldType, conflicts, strict);
if (strict) {
DateFieldType other = (DateFieldType)fieldType;
if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [format] across all types.");
}
if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types.");
}
if (Objects.equals(timeUnit(), other.timeUnit()) == false) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types.");
}
}
}
public FormatDateTimeFormatter dateTimeFormatter() {
return dateTimeFormatter;
}

View File

@ -20,9 +20,11 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.DoubleArrayList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;
@ -124,9 +126,11 @@ public class DoubleFieldMapper extends NumberFieldMapper {
}
}
static final class DoubleFieldType extends NumberFieldType {
public static final class DoubleFieldType extends NumberFieldType {
public DoubleFieldType() {}
public DoubleFieldType() {
super(NumericType.DOUBLE);
}
protected DoubleFieldType(DoubleFieldType ref) {
super(ref);
@ -137,6 +141,11 @@ public class DoubleFieldMapper extends NumberFieldMapper {
return new DoubleFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Double nullValue() {
return (Double)super.nullValue();

View File

@ -20,9 +20,11 @@
package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.FloatArrayList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;
@ -127,7 +129,9 @@ public class FloatFieldMapper extends NumberFieldMapper {
static final class FloatFieldType extends NumberFieldType {
public FloatFieldType() {}
public FloatFieldType() {
super(NumericType.FLOAT);
}
protected FloatFieldType(FloatFieldType ref) {
super(ref);
@ -138,6 +142,11 @@ public class FloatFieldMapper extends NumberFieldMapper {
return new FloatFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Float nullValue() {
return (Float)super.nullValue();

View File

@ -130,7 +130,9 @@ public class IntegerFieldMapper extends NumberFieldMapper {
public static final class IntegerFieldType extends NumberFieldType {
public IntegerFieldType() {}
public IntegerFieldType() {
super(NumericType.INT);
}
protected IntegerFieldType(IntegerFieldType ref) {
super(ref);
@ -141,6 +143,11 @@ public class IntegerFieldMapper extends NumberFieldMapper {
return new IntegerFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Integer nullValue() {
return (Integer)super.nullValue();

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;
@ -128,7 +129,9 @@ public class LongFieldMapper extends NumberFieldMapper {
public static class LongFieldType extends NumberFieldType {
public LongFieldType() {}
public LongFieldType() {
super(NumericType.LONG);
}
protected LongFieldType(LongFieldType ref) {
super(ref);
@ -139,6 +142,11 @@ public class LongFieldMapper extends NumberFieldMapper {
return new LongFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Long nullValue() {
return (Long)super.nullValue();

View File

@ -135,12 +135,12 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
public static abstract class NumberFieldType extends MappedFieldType {
public NumberFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
public NumberFieldType(NumericType numericType) {
setTokenized(false);
setOmitNorms(true);
setIndexOptions(IndexOptions.DOCS);
setStoreTermVectors(false);
setNumericType(numericType);
}
protected NumberFieldType(NumberFieldType ref) {
@ -317,8 +317,14 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
if (!mergeResult.simulate()) {
NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
if (this.fieldTypeRef.getNumAssociatedMappers() > 1 && mergeResult.updateAllTypes() == false) {
if (fieldType().numericPrecisionStep() != nfmMergeWith.fieldType().numericPrecisionStep()) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] is used by multiple types. Set update_all_types to true to update precision_step across all types.");
}
}
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
this.includeInAll = nfmMergeWith.includeInAll;
if (nfmMergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = nfmMergeWith.ignoreMalformed;

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.NumericRangeQuery;
@ -126,7 +127,9 @@ public class ShortFieldMapper extends NumberFieldMapper {
static final class ShortFieldType extends NumberFieldType {
public ShortFieldType() {}
public ShortFieldType() {
super(NumericType.INT);
}
protected ShortFieldType(ShortFieldType ref) {
super(ref);
@ -137,6 +140,11 @@ public class ShortFieldMapper extends NumberFieldMapper {
return new ShortFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Short nullValue() {
return (Short)super.nullValue();

View File

@ -186,9 +186,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
public static final class StringFieldType extends MappedFieldType {
public StringFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public StringFieldType() {}
protected StringFieldType(StringFieldType ref) {
super(ref);
@ -198,6 +196,11 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
return new StringFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
if (value == null) {

View File

@ -182,8 +182,8 @@ public class TypeParsers {
}
public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
NamedAnalyzer indexAnalyzer = null;
NamedAnalyzer searchAnalyzer = null;
NamedAnalyzer indexAnalyzer = builder.fieldType.indexAnalyzer();
NamedAnalyzer searchAnalyzer = builder.fieldType.searchAnalyzer();
for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
final String propName = Strings.toUnderscoreCase(entry.getKey());

View File

@ -287,9 +287,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
private boolean normalizeLon = true;
private boolean normalizeLat = true;
public GeoPointFieldType() {
super(StringFieldMapper.Defaults.FIELD_TYPE);
}
public GeoPointFieldType() {}
protected GeoPointFieldType(GeoPointFieldType ref) {
super(ref);
@ -330,8 +328,13 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
super.checkCompatibility(fieldType, conflicts, strict);
GeoPointFieldType other = (GeoPointFieldType)fieldType;
if (isLatLonEnabled() != other.isLatLonEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different lat_lon");

View File

@ -183,9 +183,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
private RecursivePrefixTreeStrategy recursiveStrategy;
private TermQueryPrefixTreeStrategy termStrategy;
public GeoShapeFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public GeoShapeFieldType() {}
protected GeoShapeFieldType(GeoShapeFieldType ref) {
super(ref);
@ -199,7 +197,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
}
@Override
public MappedFieldType clone() {
public GeoShapeFieldType clone() {
return new GeoShapeFieldType(this);
}
@ -221,6 +219,11 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
return Objects.hash(super.hashCode(), tree, strategyName, treeLevels, precisionInMeters, distanceErrorPct, defaultDistanceErrorPct, orientation);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void freeze() {
super.freeze();
@ -246,8 +249,8 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
super.checkCompatibility(fieldType, conflicts);
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
super.checkCompatibility(fieldType, conflicts, strict);
GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
// prevent user from changing strategies
if (strategyName().equals(other.strategyName()) == false) {

View File

@ -52,7 +52,6 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
import static org.elasticsearch.index.mapper.MapperBuilders.all;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
@ -92,8 +91,8 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
private EnabledAttributeMapper enabled = Defaults.ENABLED;
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
builder = this;
indexName = Defaults.INDEX_NAME;
}
@ -119,7 +118,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
AllFieldMapper.Builder builder = all();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
// parseField below will happily parse the doc_values setting, but it is then never passed to
// the AllFieldMapper ctor in the builder since it is not valid. Here we validate
@ -157,9 +156,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
static final class AllFieldType extends MappedFieldType {
public AllFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public AllFieldType() {}
protected AllFieldType(AllFieldType ref) {
super(ref);
@ -170,6 +167,11 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
return new AllFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
if (value == null) {
@ -191,8 +193,11 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
private EnabledAttributeMapper enabledState;
public AllFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED, null, indexSettings);
public AllFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
Defaults.ENABLED,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
}
protected AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled,
@ -312,7 +317,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
}
if (customFieldDataSettings != null) {
if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());

View File

@ -47,7 +47,6 @@ import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.fieldNames;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
@ -83,8 +82,8 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
public static class Builder extends AbstractFieldMapper.Builder<Builder, FieldNamesFieldMapper> {
private boolean enabled = Defaults.ENABLED;
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
indexName = Defaults.NAME;
}
@ -116,7 +115,7 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
throw new IllegalArgumentException("type="+CONTENT_TYPE+" is not supported on indices created before version 1.3.0. Is your cluster running multiple datanode versions?");
}
FieldNamesFieldMapper.Builder builder = fieldNames();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
parseField(builder, builder.name, node, parserContext);
}
@ -138,15 +137,18 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
private boolean enabled = Defaults.ENABLED;
public FieldNamesFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public FieldNamesFieldType() {}
protected FieldNamesFieldType(FieldNamesFieldType ref) {
super(ref);
this.enabled = ref.enabled;
}
@Override
public FieldNamesFieldType clone() {
return new FieldNamesFieldType(this);
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
@ -159,6 +161,21 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
return Objects.hash(super.hashCode(), enabled);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
if (strict) {
FieldNamesFieldType other = (FieldNamesFieldType)fieldType;
if (isEnabled() != other.isEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types.");
}
}
}
public void setEnabled(boolean enabled) {
checkIfFrozen();
this.enabled = enabled;
@ -168,11 +185,6 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
return enabled;
}
@Override
public FieldNamesFieldType clone() {
return new FieldNamesFieldType(this);
}
@Override
public String value(Object value) {
if (value == null) {
@ -190,8 +202,10 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
private final MappedFieldType defaultFieldType;
private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled
public FieldNamesFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), null, indexSettings);
public FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
}
public FieldNamesFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
@ -199,9 +213,10 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
this.defaultFieldType = Defaults.FIELD_TYPE;
this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
if (this.pre13Index) {
this.fieldType = fieldType().clone();
fieldType().setEnabled(false);
fieldType().freeze();
FieldNamesFieldType newFieldType = fieldType().clone();
newFieldType.setEnabled(false);
newFieldType.freeze();
fieldTypeRef.set(newFieldType);
}
}

View File

@ -58,7 +58,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.MapperBuilders.id;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
@ -92,8 +91,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
private String path = Defaults.PATH;
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
indexName = Defaults.NAME;
}
@ -120,7 +119,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
throw new MapperParsingException(NAME + " is not configurable");
}
IdFieldMapper.Builder builder = id();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
parseField(builder, builder.name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@ -137,9 +136,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
static final class IdFieldType extends MappedFieldType {
public IdFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public IdFieldType() {}
protected IdFieldType(IdFieldType ref) {
super(ref);
@ -150,6 +147,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
return new IdFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
@ -226,8 +227,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
private final String path;
public IdFieldMapper(Settings indexSettings) {
this(idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings);
public IdFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(idFieldType(indexSettings, existing), null, Defaults.PATH,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
}
protected IdFieldMapper(MappedFieldType fieldType, Boolean docValues, String path,
@ -236,7 +239,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
this.path = path;
}
private static MappedFieldType idFieldType(Settings indexSettings) {
private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) {
if (existing != null) {
return existing.clone();
}
MappedFieldType fieldType = Defaults.FIELD_TYPE.clone();
boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) {
@ -311,7 +317,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored()
&& fieldType().indexOptions() == Defaults.FIELD_TYPE.indexOptions()
&& path == Defaults.PATH
&& customFieldDataSettings == null) {
&& hasCustomFieldDataSettings() == false) {
return builder;
}
builder.startObject(CONTENT_TYPE);
@ -325,7 +331,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
builder.field("path", path);
}
if (customFieldDataSettings != null) {
if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());

View File

@ -79,8 +79,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
indexName = Defaults.NAME;
}
@ -99,7 +99,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
IndexFieldMapper.Builder builder = MapperBuilders.index();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
parseField(builder, builder.name, node, parserContext);
}
@ -120,9 +120,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
static final class IndexFieldType extends MappedFieldType {
public IndexFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public IndexFieldType() {}
protected IndexFieldType(IndexFieldType ref) {
super(ref);
@ -133,6 +131,11 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
return new IndexFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
if (value == null) {
@ -144,8 +147,10 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
private EnabledAttributeMapper enabledState;
public IndexFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings);
public IndexFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing,
Defaults.ENABLED_STATE,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()), indexSettings);
}
public IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState,
@ -206,7 +211,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// if all defaults, no need to write it at all
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE && customFieldDataSettings == null) {
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE && hasCustomFieldDataSettings() == false) {
return builder;
}
builder.startObject(CONTENT_TYPE);
@ -218,7 +223,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
}
if (indexCreatedBefore2x) {
if (customFieldDataSettings != null) {
if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());

View File

@ -64,10 +64,8 @@ import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMa
public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper {
public static final String NAME = "_parent";
public static final String CONTENT_TYPE = "_parent";
public static class Defaults extends AbstractFieldMapper.Defaults {
public static final String NAME = ParentFieldMapper.NAME;
@ -81,6 +79,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
FIELD_TYPE.setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
FIELD_TYPE.freeze();
}
}
@ -121,7 +120,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
ParentFieldMapper.Builder builder = MapperBuilders.parent();
Builder builder = new Builder();
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
@ -148,9 +147,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
static final class ParentFieldType extends MappedFieldType {
public ParentFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public ParentFieldType() {}
protected ParentFieldType(ParentFieldType ref) {
super(ref);
@ -161,6 +158,11 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
return new ParentFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Uid value(Object value) {
if (value == null) {
@ -234,11 +236,11 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
this.type = type;
}
public ParentFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings);
this.fieldType = this.fieldType().clone();
this.fieldType().setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
this.fieldType().freeze();
public ParentFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
null,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
}
public String type() {
@ -328,7 +330,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
builder.startObject(CONTENT_TYPE);
builder.field("type", type);
if (customFieldDataSettings != null) {
if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
@ -339,21 +341,10 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
if (Objects.equal(type, other.type) == false) {
mergeResult.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + other.type + "]");
}
if (!mergeResult.simulate()) {
ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
this.fieldType = fieldMergeWith.fieldType().clone();
this.fieldType().freeze();
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
}
}
super.merge(mergeWith, mergeResult);
ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
if (Objects.equal(type, fieldMergeWith.type) == false) {
mergeResult.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + fieldMergeWith.type + "]");
}
}

View File

@ -44,7 +44,6 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.routing;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
@ -81,8 +80,8 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
private String path = Defaults.PATH;
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
}
public Builder required(boolean required) {
@ -97,14 +96,14 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
@Override
public RoutingFieldMapper build(BuilderContext context) {
return new RoutingFieldMapper(fieldType, required, path, fieldDataSettings, context.indexSettings());
return new RoutingFieldMapper(fieldType, required, path, context.indexSettings());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
RoutingFieldMapper.Builder builder = routing();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
parseField(builder, builder.name, node, parserContext);
}
@ -126,9 +125,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
static final class RoutingFieldType extends MappedFieldType {
public RoutingFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public RoutingFieldType() {}
protected RoutingFieldType(RoutingFieldType ref) {
super(ref);
@ -139,6 +136,11 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
return new RoutingFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
if (value == null) {
@ -151,12 +153,12 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
private boolean required;
private final String path;
public RoutingFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE, Defaults.REQUIRED, Defaults.PATH, null, indexSettings);
public RoutingFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, Defaults.PATH, indexSettings);
}
protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, fieldDataSettings, indexSettings);
protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, Settings indexSettings) {
super(fieldType, false, null, indexSettings);
this.required = required;
this.path = path;
}

View File

@ -43,7 +43,6 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.size;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore;
public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
@ -61,6 +60,8 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
SIZE_FIELD_TYPE.setStored(true);
SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT);
SIZE_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
SIZE_FIELD_TYPE.setIndexAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_32_BIT));
SIZE_FIELD_TYPE.setSearchAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE));
SIZE_FIELD_TYPE.freeze();
}
}
@ -69,8 +70,8 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
public Builder() {
super(Defaults.NAME, Defaults.SIZE_FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.SIZE_FIELD_TYPE : existing, Defaults.PRECISION_STEP_32_BIT);
builder = this;
}
@ -82,7 +83,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
@Override
public SizeFieldMapper build(BuilderContext context) {
setupFieldType(context);
return new SizeFieldMapper(enabledState, fieldType, fieldDataSettings, context.indexSettings());
return new SizeFieldMapper(enabledState, fieldType, context.indexSettings());
}
@Override
@ -99,7 +100,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
SizeFieldMapper.Builder builder = size();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
@ -118,14 +119,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
private EnabledAttributeMapper enabledState;
public SizeFieldMapper(Settings indexSettings) {
this(Defaults.ENABLED_STATE, Defaults.SIZE_FIELD_TYPE.clone(), null, indexSettings);
public SizeFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(Defaults.ENABLED_STATE, existing == null ? Defaults.SIZE_FIELD_TYPE.clone() : existing.clone(), indexSettings);
}
public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false,
Defaults.IGNORE_MALFORMED, Defaults.COERCE, fieldDataSettings,
indexSettings, MultiFields.empty(), null);
public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, Settings indexSettings) {
super(fieldType, false, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings, MultiFields.empty(), null);
this.enabledState = enabled;
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.internal;
import com.google.common.base.Objects;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
@ -63,7 +62,6 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;
import static org.elasticsearch.index.mapper.MapperBuilders.source;
/**
*
@ -150,7 +148,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
SourceFieldMapper.Builder builder = source();
Builder builder = new Builder();
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@ -202,9 +200,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
static final class SourceFieldType extends MappedFieldType {
public SourceFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public SourceFieldType() {}
protected SourceFieldType(SourceFieldType ref) {
super(ref);
@ -215,6 +211,11 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
return new SourceFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public byte[] value(Object value) {
if (value == null) {

View File

@ -51,7 +51,6 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue;
import static org.elasticsearch.index.mapper.MapperBuilders.ttl;
public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
@ -115,7 +114,7 @@ public class TTLFieldMapper extends LongFieldMapper implements RootMapper {
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
TTLFieldMapper.Builder builder = ttl();
Builder builder = new Builder();
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());

View File

@ -50,7 +50,6 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.timestamp;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseDateTimeFormatter;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
@ -95,8 +94,12 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
private boolean explicitStore = false;
private Boolean ignoreMissing = null;
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.PRECISION_STEP_64_BIT);
if (existing != null) {
// if there is an existing type, always use that store value (only matters for < 2.0)
explicitStore = true;
}
}
DateFieldType fieldType() {
@ -137,7 +140,6 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
@Override
public TimestampFieldMapper build(BuilderContext context) {
if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0)) {
assert fieldType.stored();
fieldType.setStored(false);
}
setupFieldType(context);
@ -160,7 +162,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
TimestampFieldMapper.Builder builder = timestamp();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
parseField(builder, builder.name, node, parserContext);
}
@ -234,7 +236,10 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
}
}
private static MappedFieldType defaultFieldType(Settings settings) {
private static MappedFieldType defaultFieldType(Settings settings, MappedFieldType existing) {
if (existing != null) {
return existing;
}
return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE;
}
@ -245,9 +250,11 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
private final MappedFieldType defaultFieldType;
private final Boolean ignoreMissing;
public TimestampFieldMapper(Settings indexSettings) {
this(defaultFieldType(indexSettings).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP,
null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings);
public TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(defaultFieldType(indexSettings, existing).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP,
null, Defaults.IGNORE_MALFORMED, Defaults.COERCE,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
}
protected TimestampFieldMapper(MappedFieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path,
@ -258,7 +265,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
this.enabledState = enabledState;
this.path = path;
this.defaultTimestamp = defaultTimestamp;
this.defaultFieldType = defaultFieldType(indexSettings);
this.defaultFieldType = defaultFieldType(indexSettings, null);
this.ignoreMissing = ignoreMissing;
}
@ -326,7 +333,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
// if all are defaults, no sense to write it at all
if (!includeDefaults && indexed == indexedDefault && customFieldDataSettings == null &&
if (!includeDefaults && indexed == indexedDefault && hasCustomFieldDataSettings() == false &&
fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH
&& fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())
&& Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)
@ -359,7 +366,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
builder.field("ignore_missing", ignoreMissing);
}
if (indexCreatedBefore2x) {
if (customFieldDataSettings != null) {
if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());

View File

@ -50,7 +50,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.MapperBuilders.type;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
@ -81,15 +80,15 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class Builder extends AbstractFieldMapper.Builder<Builder, TypeFieldMapper> {
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
indexName = Defaults.NAME;
}
@Override
public TypeFieldMapper build(BuilderContext context) {
fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
return new TypeFieldMapper(fieldType, fieldDataSettings, context.indexSettings());
return new TypeFieldMapper(fieldType, context.indexSettings());
}
}
@ -99,7 +98,7 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
throw new MapperParsingException(NAME + " is not configurable");
}
TypeFieldMapper.Builder builder = type();
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
parseField(builder, builder.name, node, parserContext);
return builder;
}
@ -107,9 +106,7 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
static final class TypeFieldType extends MappedFieldType {
public TypeFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public TypeFieldType() {}
protected TypeFieldType(TypeFieldType ref) {
super(ref);
@ -120,6 +117,11 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
return new TypeFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public String value(Object value) {
if (value == null) {
@ -142,12 +144,13 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
}
}
public TypeFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), null, indexSettings);
public TypeFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
indexSettings);
}
public TypeFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
super(fieldType, false, fieldDataSettings, indexSettings);
public TypeFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
super(fieldType, false, null, indexSettings);
}
@Override

View File

@ -46,7 +46,6 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.MapperBuilders.uid;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
@ -82,8 +81,8 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class Builder extends AbstractFieldMapper.Builder<Builder, UidFieldMapper> {
public Builder() {
super(Defaults.NAME, Defaults.FIELD_TYPE);
public Builder(MappedFieldType existing) {
super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
indexName = Defaults.NAME;
}
@ -97,10 +96,10 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = uid();
if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
throw new MapperParsingException(NAME + " is not configurable");
}
Builder builder = new Builder(parserContext.mapperService().fullName(NAME));
parseField(builder, builder.name, node, parserContext);
return builder;
}
@ -108,9 +107,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
static final class UidFieldType extends MappedFieldType {
public UidFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public UidFieldType() {}
protected UidFieldType(UidFieldType ref) {
super(ref);
@ -121,6 +118,11 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
return new UidFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Uid value(Object value) {
if (value == null) {
@ -130,8 +132,10 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
}
}
public UidFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings);
public UidFieldMapper(Settings indexSettings, MappedFieldType existing) {
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing, null,
existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
indexSettings);
}
protected UidFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {
@ -220,13 +224,13 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// if defaults, don't output
if (!includeDefaults && customFieldDataSettings == null) {
if (!includeDefaults && hasCustomFieldDataSettings() == false) {
return builder;
}
builder.startObject(CONTENT_TYPE);
if (customFieldDataSettings != null) {
if (hasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());

View File

@ -42,8 +42,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.MapperBuilders.version;
/** Mapper for the _version field. */
public class VersionFieldMapper extends AbstractFieldMapper implements RootMapper {
@ -77,11 +75,10 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = version();
Builder builder = new Builder();
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (fieldName.equals(DOC_VALUES_FORMAT) && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
// ignore in 1.x, reject in 2.x
iterator.remove();
@ -93,9 +90,7 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe
static final class VersionFieldType extends MappedFieldType {
public VersionFieldType() {
super(AbstractFieldMapper.Defaults.FIELD_TYPE);
}
public VersionFieldType() {}
protected VersionFieldType(VersionFieldType ref) {
super(ref);
@ -106,6 +101,11 @@ public class VersionFieldMapper extends AbstractFieldMapper implements RootMappe
return new VersionFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Long value(Object value) {
if (value == null || (value instanceof Long)) {

View File

@ -20,8 +20,10 @@
package org.elasticsearch.index.mapper.ip;
import com.google.common.net.InetAddresses;
import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
@ -158,7 +160,9 @@ public class IpFieldMapper extends NumberFieldMapper {
public static final class IpFieldType extends NumberFieldType {
public IpFieldType() {}
public IpFieldType() {
super(NumericType.LONG);
}
protected IpFieldType(IpFieldType ref) {
super(ref);
@ -169,6 +173,11 @@ public class IpFieldMapper extends NumberFieldMapper {
return new IpFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public Long value(Object value) {
if (value == null) {

View File

@ -177,8 +177,6 @@ public class IndexShard extends AbstractIndexShardComponent {
private final MeanMetric flushMetric = new MeanMetric();
private final ShardEngineFailListener failedEngineListener = new ShardEngineFailListener();
private final MapperAnalyzer mapperAnalyzer;
private volatile boolean flushOnClose = true;
/**
@ -234,7 +232,6 @@ public class IndexShard extends AbstractIndexShardComponent {
this.refreshInterval = indexSettings.getAsTime(INDEX_REFRESH_INTERVAL, EngineConfig.DEFAULT_REFRESH_INTERVAL);
this.flushOnClose = indexSettings.getAsBoolean(INDEX_FLUSH_ON_CLOSE, true);
indexSettingsService.addListener(applyRefreshSettings);
this.mapperAnalyzer = new MapperAnalyzer(mapperService);
this.path = path;
this.mergePolicyConfig = new MergePolicyConfig(logger, indexSettings);
/* create engine config */
@ -461,7 +458,6 @@ public class IndexShard extends AbstractIndexShardComponent {
public void create(Engine.Create create) {
writeAllowed(create.origin());
create = indexingService.preCreate(create);
mapperAnalyzer.setType(create.type());
try {
if (logger.isTraceEnabled()) {
logger.trace("index [{}][{}]{}", create.type(), create.id(), create.docs());
@ -500,7 +496,6 @@ public class IndexShard extends AbstractIndexShardComponent {
public boolean index(Engine.Index index) {
writeAllowed(index.origin());
index = indexingService.preIndex(index);
mapperAnalyzer.setType(index.type());
final boolean created;
try {
if (logger.isTraceEnabled()) {
@ -1341,7 +1336,7 @@ public class IndexShard extends AbstractIndexShardComponent {
}
private final EngineConfig newEngineConfig(TranslogConfig translogConfig) {
final TranslogRecoveryPerformer translogRecoveryPerformer = new TranslogRecoveryPerformer(shardId, mapperService, mapperAnalyzer, queryParserService, indexAliasesService, indexCache) {
final TranslogRecoveryPerformer translogRecoveryPerformer = new TranslogRecoveryPerformer(shardId, mapperService, queryParserService, indexAliasesService, indexCache) {
@Override
protected void operationProcessed() {
assert recoveryState != null;
@ -1350,7 +1345,7 @@ public class IndexShard extends AbstractIndexShardComponent {
};
return new EngineConfig(shardId,
threadPool, indexingService, indexSettingsService.indexSettings(), warmer, store, deletionPolicy, mergePolicyConfig.getMergePolicy(), mergeSchedulerConfig,
mapperAnalyzer, similarityService.similarity(), codecService, failedEngineListener, translogRecoveryPerformer, indexCache.filter(), indexCache.filterPolicy(), translogConfig);
mapperService.indexAnalyzer(), similarityService.similarity(), codecService, failedEngineListener, translogRecoveryPerformer, indexCache.filter(), indexCache.filterPolicy(), translogConfig);
}
private static class IndexShardOperationCounter extends AbstractRefCounted {

View File

@ -53,17 +53,15 @@ public class TranslogRecoveryPerformer {
private final IndexQueryParserService queryParserService;
private final IndexAliasesService indexAliasesService;
private final IndexCache indexCache;
private final MapperAnalyzer mapperAnalyzer;
private final Map<String, Mapping> recoveredTypes = new HashMap<>();
private final ShardId shardId;
protected TranslogRecoveryPerformer(ShardId shardId, MapperService mapperService, MapperAnalyzer mapperAnalyzer, IndexQueryParserService queryParserService, IndexAliasesService indexAliasesService, IndexCache indexCache) {
protected TranslogRecoveryPerformer(ShardId shardId, MapperService mapperService, IndexQueryParserService queryParserService, IndexAliasesService indexAliasesService, IndexCache indexCache) {
this.shardId = shardId;
this.mapperService = mapperService;
this.queryParserService = queryParserService;
this.indexAliasesService = indexAliasesService;
this.indexCache = indexCache;
this.mapperAnalyzer = mapperAnalyzer;
}
protected Tuple<DocumentMapper, Mapping> docMapper(String type) {
@ -136,7 +134,6 @@ public class TranslogRecoveryPerformer {
source(create.source()).type(create.type()).id(create.id())
.routing(create.routing()).parent(create.parent()).timestamp(create.timestamp()).ttl(create.ttl()),
create.version(), create.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, true, false);
mapperAnalyzer.setType(create.type()); // this is a PITA - once mappings are per index not per type this can go away an we can just simply move this to the engine eventually :)
maybeAddMappingUpdate(engineCreate.type(), engineCreate.parsedDoc().dynamicMappingsUpdate(), engineCreate.id(), allowMappingUpdates);
engine.create(engineCreate);
break;
@ -145,7 +142,6 @@ public class TranslogRecoveryPerformer {
Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), source(index.source()).type(index.type()).id(index.id())
.routing(index.routing()).parent(index.parent()).timestamp(index.timestamp()).ttl(index.ttl()),
index.version(), index.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, true);
mapperAnalyzer.setType(index.type());
maybeAddMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate(), engineIndex.id(), allowMappingUpdates);
engine.index(engineIndex);
break;

View File

@ -419,7 +419,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
logger.debug("[{}] adding mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, mappingType);
}
// we don't apply default, since it has been applied when the mappings were parsed initially
mapperService.merge(mappingType, mappingSource, false);
mapperService.merge(mappingType, mappingSource, false, true);
if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());
requiresRefresh = true;
@ -436,7 +436,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
logger.debug("[{}] updating mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, mappingType);
}
// we don't apply default, since it has been applied when the mappings were parsed initially
mapperService.merge(mappingType, mappingSource, false);
mapperService.merge(mappingType, mappingSource, false, true);
if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
requiresRefresh = true;
logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());

View File

@ -46,6 +46,7 @@ public class RestCreateIndexAction extends BaseRestHandler {
if (request.hasContent()) {
createIndexRequest.source(request.content());
}
createIndexRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false));
createIndexRequest.timeout(request.paramAsTime("timeout", createIndexRequest.timeout()));
createIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createIndexRequest.masterNodeTimeout()));
client.admin().indices().create(createIndexRequest, new AcknowledgedRestListener<CreateIndexResponse>(channel));

View File

@ -69,6 +69,7 @@ public class RestPutMappingAction extends BaseRestHandler {
PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index")));
putMappingRequest.type(request.param("type"));
putMappingRequest.source(request.content().toUtf8());
putMappingRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false));
putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout()));
putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout()));
putMappingRequest.indicesOptions(IndicesOptions.fromRequest(request, putMappingRequest.indicesOptions()));

View File

@ -268,7 +268,7 @@ public class TermVectorsUnitTests extends ElasticsearchTestCase {
ft.setStoreTermVectorPositions(true);
String ftOpts = AbstractFieldMapper.termVectorOptionsToString(ft);
assertThat("with_positions_payloads", equalTo(ftOpts));
AllFieldMapper.Builder builder = new AllFieldMapper.Builder();
AllFieldMapper.Builder builder = new AllFieldMapper.Builder(null);
boolean exceptiontrown = false;
try {
TypeParsers.parseTermVector("", ftOpts, builder);

View File

@ -104,7 +104,7 @@ public class SimpleExistsTests extends ElasticsearchIntegrationTest {
createIndex("test");
client().prepareIndex("test", "type1", "1").setSource("field", 2).execute().actionGet();
client().prepareIndex("test", "type1", "2").setSource("field", 5).execute().actionGet();
client().prepareIndex("test", "type", "XXX1").setSource("field", "value").execute().actionGet();
client().prepareIndex("test", "type", "XXX1").setSource("str_field", "value").execute().actionGet();
ensureGreen();
refresh();
ExistsResponse existsResponse = client().prepareExists("test").setQuery(QueryBuilders.rangeQuery("field").gte(6).lte(8)).execute().actionGet();

View File

@ -70,21 +70,15 @@ public class RecoveryFromGatewayTests extends ElasticsearchIntegrationTest {
.endObject().endObject().string();
assertAcked(prepareCreate("test").addMapping("type1", mapping));
client().prepareIndex("test", "type1", "10990239").setSource(jsonBuilder().startObject()
.field("_id", "10990239")
.startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "10990473").setSource(jsonBuilder().startObject()
.field("_id", "10990473")
.startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "10990513").setSource(jsonBuilder().startObject()
.field("_id", "10990513")
.startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "10990695").setSource(jsonBuilder().startObject()
.field("_id", "10990695")
.startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "11026351").setSource(jsonBuilder().startObject()
.field("_id", "11026351")
.startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
refresh();

View File

@ -27,12 +27,14 @@ import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.get.*;
import org.elasticsearch.action.get.GetRequestBuilder;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
@ -52,7 +54,13 @@ import java.util.Set;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
public class GetActionTests extends ElasticsearchIntegrationTest {
@ -258,87 +266,6 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo(fieldValue));
}
@Test
public void getFieldsWithDifferentTypes() throws Exception {
    // Two types in the same index: type1 has no explicit mappings (fields come back
    // from _source), type2 stores every field explicitly (fields come back from
    // stored fields). The test asserts the value types differ accordingly.
    assertAcked(prepareCreate("test").setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
            .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject())
            .addMapping("type2", jsonBuilder().startObject().startObject("type2")
                    .startObject("properties")
                    .startObject("str").field("type", "string").field("store", "yes").endObject()
                    .startObject("strs").field("type", "string").field("store", "yes").endObject()
                    .startObject("int").field("type", "integer").field("store", "yes").endObject()
                    .startObject("ints").field("type", "integer").field("store", "yes").endObject()
                    .startObject("date").field("type", "date").field("store", "yes").endObject()
                    .startObject("binary").field("type", "binary").field("store", "yes").endObject()
                    .endObject()
                    .endObject().endObject()));
    ensureGreen();
    // Index the same document content under both types.
    client().prepareIndex("test", "type1", "1").setSource(
            jsonBuilder().startObject()
                    .field("str", "test")
                    .field("strs", new String[]{"A", "B", "C"})
                    .field("int", 42)
                    .field("ints", new int[]{1, 2, 3, 4})
                    .field("date", "2012-11-13T15:26:14.000Z")
                    .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
                    .endObject()).get();
    client().prepareIndex("test", "type2", "1").setSource(
            jsonBuilder().startObject()
                    .field("str", "test")
                    .field("strs", new String[]{"A", "B", "C"})
                    .field("int", 42)
                    .field("ints", new int[]{1, 2, 3, 4})
                    .field("date", "2012-11-13T15:26:14.000Z")
                    .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
                    .endObject()).get();
    // realtime get with stored source
    logger.info("--> realtime get (from source)");
    GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    // Values extracted from _source: JSON numbers come back widened to Long.
    assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // it's a String..., not binary mapped
    logger.info("--> realtime get (from stored fields)");
    getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    // Values from stored fields keep the mapped types: Integer, int list, BytesReference.
    assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));
    logger.info("--> flush the index, so we load it from it");
    flush();
    // After a flush the same gets go through the non-realtime path; the
    // expectations per type are unchanged.
    logger.info("--> non realtime get (from source)");
    getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // it's a String..., not binary mapped
    logger.info("--> non realtime get (from stored fields)");
    getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
    assertThat(getResponse.isExists(), equalTo(true));
    assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
    assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
    assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
    assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
    assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
    assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));
}
@Test
public void testGetDocWithMultivaluedFields() throws Exception {
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")
@ -1005,7 +932,11 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
" \"refresh_interval\": \"-1\"\n" +
" },\n" +
" \"mappings\": {\n" +
" \"parentdoc\": {},\n" +
" \"parentdoc\": {\n" +
" \"_ttl\": {\n" +
" \"enabled\": true\n" +
" }\n" +
" },\n" +
" \"doc\": {\n" +
" \"_parent\": {\n" +
" \"type\": \"parentdoc\"\n" +

View File

@ -1811,14 +1811,14 @@ public class InternalEngineTests extends ElasticsearchTestCase {
public final AtomicInteger recoveredOps = new AtomicInteger(0);
public TranslogHandler(String indexName) {
super(new ShardId("test", 0), null, new MapperAnalyzer(null), null, null, null);
super(new ShardId("test", 0), null, null, null, null);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test");
Index index = new Index(indexName);
AnalysisService analysisService = new AnalysisService(index, settings);
SimilarityLookupService similarityLookupService = new SimilarityLookupService(index, settings);
MapperService mapperService = new MapperService(index, settings, analysisService, null, similarityLookupService, null);
DocumentMapper.Builder b = new DocumentMapper.Builder(indexName, settings, rootBuilder);
DocumentMapper.Builder b = new DocumentMapper.Builder(indexName, settings, rootBuilder, mapperService);
DocumentMapperParser parser = new DocumentMapperParser(index, settings, mapperService, analysisService, similarityLookupService, null);
this.docMapper = b.build(mapperService, parser);

View File

@ -34,6 +34,7 @@ import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.After;
@ -85,7 +86,7 @@ public abstract class AbstractFieldDataTests extends ElasticsearchSingleNodeTest
} else if (type.getType().equals("geo_point")) {
fieldType = MapperBuilders.geoPointField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("_parent")) {
fieldType = MapperBuilders.parent().type(fieldName).build(context).fieldType();
fieldType = new ParentFieldMapper.Builder().type(fieldName).build(context).fieldType();
} else if (type.getType().equals("binary")) {
fieldType = MapperBuilders.binaryField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else {

View File

@ -58,10 +58,10 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTests {
@Before
public void before() throws Exception {
mapperService.merge(
childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true
childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true, false
);
mapperService.merge(
grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true
grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true, false
);
Document d = new Document();

View File

@ -30,6 +30,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.core.IntegerFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import java.io.IOException;
@ -360,4 +362,50 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
/**
 * Dynamic mapping must reuse the settings of an existing field with the same
 * name from another type, rather than inventing a new dynamic mapping.
 * <p>
 * Fix: the original declared {@code throws IOException, Exception} — the
 * {@code IOException} is redundant since it is a subtype of {@code Exception}.
 */
public void testReuseExistingMappings() throws Exception {
    IndexService indexService = createIndex("test", Settings.EMPTY, "type", "my_field1", "type=string,store=yes", "my_field2", "type=integer,precision_step=10");

    // Even if the dynamic type of our new field is long, we already have a mapping for the same field
    // of type string so it should be mapped as a string
    DocumentMapper newMapper = indexService.mapperService().documentMapperWithAutoCreate("type2").v1();
    Mapper update = parse(newMapper, indexService.mapperService().documentMapperParser(),
            XContentFactory.jsonBuilder().startObject().field("my_field1", 42).endObject());
    Mapper myField1Mapper = null;
    for (Mapper m : update) {
        if (m.name().equals("my_field1")) {
            myField1Mapper = m;
        }
    }
    assertNotNull(myField1Mapper);
    // same type
    assertTrue(myField1Mapper instanceof StringFieldMapper);
    // and same option
    assertTrue(((StringFieldMapper) myField1Mapper).fieldType().stored());

    // Even if dynamic mappings would map a numeric field as a long, here it should map it as a integer
    // since we already have a mapping of type integer
    update = parse(newMapper, indexService.mapperService().documentMapperParser(),
            XContentFactory.jsonBuilder().startObject().field("my_field2", 42).endObject());
    Mapper myField2Mapper = null;
    for (Mapper m : update) {
        if (m.name().equals("my_field2")) {
            myField2Mapper = m;
        }
    }
    assertNotNull(myField2Mapper);
    // same type
    assertTrue(myField2Mapper instanceof IntegerFieldMapper);
    // and same option
    assertEquals(10, ((IntegerFieldMapper) myField2Mapper).fieldType().numericPrecisionStep());

    // This can't work: a string value cannot be parsed into the existing integer field.
    try {
        parse(newMapper, indexService.mapperService().documentMapperParser(),
                XContentFactory.jsonBuilder().startObject().field("my_field2", "foobar").endObject());
        fail("Cannot succeed, incompatible types");
    } catch (MapperParsingException e) {
        // expected
    }
}
}

View File

@ -1,195 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import org.apache.lucene.document.FieldType;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.test.ElasticsearchTestCase;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
 * Unit tests for {@code FieldMappersLookup}: a copy-on-write lookup from field
 * full names and index names to the {@code FieldMapper}s registered for them.
 * Each {@code copyAndAddAll} call returns a new lookup; the original instance
 * must remain unchanged (verified by the add tests below).
 */
public class FieldMappersLookupTests extends ElasticsearchTestCase {

    // A freshly constructed lookup resolves nothing and iterates over nothing.
    public void testEmpty() {
        FieldMappersLookup lookup = new FieldMappersLookup();
        assertNull(lookup.fullName("foo"));
        assertNull(lookup.indexName("foo"));
        Collection<String> names = lookup.simpleMatchToFullName("foo");
        assertNotNull(names);
        assertTrue(names.isEmpty());
        names = lookup.simpleMatchToFullName("foo");
        assertNotNull(names);
        assertTrue(names.isEmpty());
        assertNull(lookup.smartName("foo"));
        assertNull(lookup.smartNameFieldMapper("foo"));
        assertNull(lookup.get("foo"));
        Iterator<FieldMapper> itr = lookup.iterator();
        assertNotNull(itr);
        assertFalse(itr.hasNext());
    }

    // Adding a mapper returns a new lookup that resolves it by full name and
    // index name; the original lookup is untouched (copy-on-write).
    public void testNewField() {
        FieldMappersLookup lookup = new FieldMappersLookup();
        FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
        FieldMappersLookup lookup2 = lookup.copyAndAddAll(newList(f));
        assertNull(lookup.fullName("foo"));
        assertNull(lookup.indexName("bar"));

        FieldMappers mappers = lookup2.fullName("foo");
        assertNotNull(mappers);
        assertEquals(1, mappers.mappers().size());
        assertEquals(f, mappers.mapper());
        mappers = lookup2.indexName("bar");
        assertNotNull(mappers);
        assertEquals(1, mappers.mappers().size());
        assertEquals(f, mappers.mapper());
        assertEquals(1, Iterators.size(lookup2.iterator()));
    }

    // Adding a second mapper with the same full/index name extends the existing
    // entry to hold both mappers.
    public void testExtendField() {
        FieldMappersLookup lookup = new FieldMappersLookup();
        FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
        FakeFieldMapper other = new FakeFieldMapper("blah", "blah");
        lookup = lookup.copyAndAddAll(newList(f, other));
        FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar");
        FieldMappersLookup lookup2 = lookup.copyAndAddAll(newList(f2));

        FieldMappers mappers = lookup2.fullName("foo");
        assertNotNull(mappers);
        assertEquals(2, mappers.mappers().size());
        mappers = lookup2.indexName("bar");
        assertNotNull(mappers);
        assertEquals(2, mappers.mappers().size());
        assertEquals(3, Iterators.size(lookup2.iterator()));
    }

    public void testIndexName() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "foo");
        FieldMappersLookup lookup = new FieldMappersLookup();
        lookup = lookup.copyAndAddAll(newList(f1));

        FieldMappers mappers = lookup.indexName("foo");
        assertNotNull(mappers);
        assertEquals(1, mappers.mappers().size());
        assertEquals(f1, mappers.mapper());
    }

    // Wildcard matching against index names returns the matching index names.
    public void testSimpleMatchIndexNames() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
        FieldMappersLookup lookup = new FieldMappersLookup();
        lookup = lookup.copyAndAddAll(newList(f1, f2));
        Collection<String> names = lookup.simpleMatchToIndexNames("b*");
        assertTrue(names.contains("baz"));
        assertTrue(names.contains("boo"));
    }

    // Wildcard matching against full names returns the full names whose index
    // names matched the pattern.
    public void testSimpleMatchFullNames() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
        FieldMappersLookup lookup = new FieldMappersLookup();
        lookup = lookup.copyAndAddAll(newList(f1, f2));
        Collection<String> names = lookup.simpleMatchToFullName("b*");
        assertTrue(names.contains("foo"));
        assertTrue(names.contains("bar"));
    }

    // smartName resolves by full name first, then by index name; the
    // single-mapper variant returns the first mapper registered.
    public void testSmartName() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "realfoo");
        FakeFieldMapper f2 = new FakeFieldMapper("foo", "realbar");
        FakeFieldMapper f3 = new FakeFieldMapper("baz", "realfoo");
        FieldMappersLookup lookup = new FieldMappersLookup();
        lookup = lookup.copyAndAddAll(newList(f1, f2, f3));
        assertNotNull(lookup.smartName("foo"));
        assertEquals(2, lookup.smartName("foo").mappers().size());
        assertNotNull(lookup.smartName("realfoo"));
        assertEquals(f1, lookup.smartNameFieldMapper("foo"));
        assertEquals(f2, lookup.smartNameFieldMapper("realbar"));
    }

    // The iterator exposed by the lookup must be read-only.
    public void testIteratorImmutable() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
        FieldMappersLookup lookup = new FieldMappersLookup();
        lookup = lookup.copyAndAddAll(newList(f1));

        try {
            Iterator<FieldMapper> itr = lookup.iterator();
            assertTrue(itr.hasNext());
            assertEquals(f1, itr.next());
            itr.remove();
            fail("remove should have failed");
        } catch (UnsupportedOperationException e) {
            // expected
        }
    }

    // get() resolves by full name only, and refuses to answer when more than
    // one mapper is registered under the same full name.
    public void testGetMapper() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
        FieldMappersLookup lookup = new FieldMappersLookup();
        lookup = lookup.copyAndAddAll(newList(f1));

        assertEquals(f1, lookup.get("foo"));
        assertNull(lookup.get("bar")); // get is only by full name
        FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
        lookup = lookup.copyAndAddAll(newList(f2));
        try {
            lookup.get("foo");
            fail("get should have enforced foo is unique");
        } catch (IllegalStateException e) {
            // expected
        }
    }

    static List<FieldMapper> newList(FieldMapper... mapper) {
        return Lists.newArrayList(mapper);
    }

    // Minimal FieldMapper stub: AbstractFieldMapper requires overriding this
    // much just to get a dummy instance with a given full name and index name.
    static class FakeFieldMapper extends AbstractFieldMapper {
        static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
        public FakeFieldMapper(String fullName, String indexName) {
            super(makeFieldType(fullName, indexName), null, null, dummySettings, null, null);
        }
        static MappedFieldType makeFieldType(String fullName, String indexName) {
            MappedFieldType fieldType = Defaults.FIELD_TYPE.clone();
            fieldType.setNames(new MappedFieldType.Names(fullName, indexName, indexName, fullName));
            return fieldType;
        }
        @Override
        public MappedFieldType defaultFieldType() { return null; }
        @Override
        public FieldDataType defaultFieldDataType() { return null; }
        @Override
        protected String contentType() { return null; }
        @Override
        protected void parseCreateField(ParseContext context, List list) throws IOException {}
    }
}

View File

@ -0,0 +1,212 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.test.ElasticsearchTestCase;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
 * Unit tests for {@code FieldTypeLookup}: a copy-on-write lookup from field
 * full names and index names to a single shared {@code MappedFieldType}.
 * Adding a mapper for an already-known name must make all mappers for that
 * name share one field type instance.
 * <p>
 * Fix: {@code testAddExistingBridgeName} was missing {@code fail(...)} calls
 * after the statements expected to throw, so the test silently passed when no
 * exception was thrown at all.
 */
public class FieldTypeLookupTests extends ElasticsearchTestCase {

    // A freshly constructed lookup resolves nothing and iterates over nothing.
    public void testEmpty() {
        FieldTypeLookup lookup = new FieldTypeLookup();
        assertNull(lookup.get("foo"));
        assertNull(lookup.getByIndexName("foo"));
        Collection<String> names = lookup.simpleMatchToFullName("foo");
        assertNotNull(names);
        assertTrue(names.isEmpty());
        names = lookup.simpleMatchToIndexNames("foo");
        assertNotNull(names);
        assertTrue(names.isEmpty());
        Iterator<MappedFieldType> itr = lookup.iterator();
        assertNotNull(itr);
        assertFalse(itr.hasNext());
    }

    // Adding a mapper returns a new lookup that resolves its field type by full
    // name and index name; the original lookup is untouched (copy-on-write).
    public void testAddNewField() {
        FieldTypeLookup lookup = new FieldTypeLookup();
        FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f));
        assertNull(lookup.get("foo"));
        assertNull(lookup.get("bar"));
        assertNull(lookup.getByIndexName("foo"));
        assertNull(lookup.getByIndexName("bar"));
        assertEquals(f.fieldType(), lookup2.get("foo"));
        assertNull(lookup.get("bar"));
        assertEquals(f.fieldType(), lookup2.getByIndexName("bar"));
        assertNull(lookup.getByIndexName("foo"));
        assertEquals(1, Iterators.size(lookup2.iterator()));
    }

    // Adding a second mapper with the same full and index name makes both
    // mappers share a single (new) field type instance.
    public void testAddExistingField() {
        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
        MappedFieldType originalFieldType = f.fieldType();
        FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll(newList(f));
        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2));

        assertNotSame(originalFieldType, f.fieldType());
        assertSame(f.fieldType(), f2.fieldType());
        assertSame(f.fieldType(), lookup2.get("foo"));
        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
        assertEquals(1, Iterators.size(lookup2.iterator()));
    }

    // Two different full names sharing one index name also share one field type.
    public void testAddExistingIndexName() {
        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
        FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo");
        MappedFieldType originalFieldType = f.fieldType();
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll(newList(f));
        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2));

        assertNotSame(originalFieldType, f.fieldType());
        assertSame(f.fieldType(), f2.fieldType());
        assertSame(f.fieldType(), lookup2.get("foo"));
        assertSame(f.fieldType(), lookup2.get("bar"));
        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
        assertEquals(2, Iterators.size(lookup2.iterator()));
    }

    // One full name mapped under two different index names also shares one
    // field type, reachable via either index name.
    public void testAddExistingFullName() {
        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
        FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar");
        MappedFieldType originalFieldType = f.fieldType();
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll(newList(f));
        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2));

        assertNotSame(originalFieldType, f.fieldType());
        assertSame(f.fieldType(), f2.fieldType());
        assertSame(f.fieldType(), lookup2.get("foo"));
        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
        assertSame(f.fieldType(), lookup2.getByIndexName("bar"));
        assertEquals(1, Iterators.size(lookup2.iterator()));
    }

    // A mapper whose full name matches one existing entry and whose index name
    // matches a different existing entry would "bridge" two field types; the
    // lookup must reject that. fail(...) added so a missing exception is
    // reported instead of silently passing.
    public void testAddExistingBridgeName() {
        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
        FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar");
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll(newList(f, f2));

        try {
            FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar");
            lookup.copyAndAddAll(newList(f3));
            fail("expected bridging mapper to be rejected");
        } catch (IllegalStateException e) {
            assertTrue(e.getMessage().contains("insane mappings"));
        }

        try {
            FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo");
            lookup.copyAndAddAll(newList(f3));
            fail("expected bridging mapper to be rejected");
        } catch (IllegalStateException e) {
            assertTrue(e.getMessage().contains("insane mappings"));
        }
    }

    // TODO: add tests for validation

    // Wildcard matching against index names returns the matching index names.
    public void testSimpleMatchIndexNames() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll(newList(f1, f2));
        Collection<String> names = lookup.simpleMatchToIndexNames("b*");
        assertTrue(names.contains("baz"));
        assertTrue(names.contains("boo"));
    }

    // Wildcard matching against full names returns the full names whose index
    // names matched the pattern.
    public void testSimpleMatchFullNames() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
        FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll(newList(f1, f2));
        Collection<String> names = lookup.simpleMatchToFullName("b*");
        assertTrue(names.contains("foo"));
        assertTrue(names.contains("bar"));
    }

    // The iterator exposed by the lookup must be read-only.
    public void testIteratorImmutable() {
        FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
        FieldTypeLookup lookup = new FieldTypeLookup();
        lookup = lookup.copyAndAddAll(newList(f1));

        try {
            Iterator<MappedFieldType> itr = lookup.iterator();
            assertTrue(itr.hasNext());
            assertEquals(f1.fieldType(), itr.next());
            itr.remove();
            fail("remove should have failed");
        } catch (UnsupportedOperationException e) {
            // expected
        }
    }

    static List<FieldMapper> newList(FieldMapper... mapper) {
        return Lists.newArrayList(mapper);
    }

    // Minimal FieldMapper stub: AbstractFieldMapper requires overriding this
    // much just to get a dummy instance with a given full name and index name.
    static class FakeFieldMapper extends AbstractFieldMapper {
        static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
        public FakeFieldMapper(String fullName, String indexName) {
            super(makeFieldType(fullName, indexName), null, null, dummySettings, null, null);
        }
        static MappedFieldType makeFieldType(String fullName, String indexName) {
            FakeFieldType fieldType = new FakeFieldType();
            fieldType.setNames(new MappedFieldType.Names(fullName, indexName, indexName, fullName));
            return fieldType;
        }
        static class FakeFieldType extends MappedFieldType {
            public FakeFieldType() {}
            protected FakeFieldType(FakeFieldType ref) {
                super(ref);
            }
            @Override
            public MappedFieldType clone() {
                return new FakeFieldType(this);
            }
            @Override
            public String typeName() {
                return "faketype";
            }
        }
        @Override
        public MappedFieldType defaultFieldType() { return null; }
        @Override
        public FieldDataType defaultFieldDataType() { return null; }
        @Override
        protected String contentType() { return null; }
        @Override
        protected void parseCreateField(ParseContext context, List list) throws IOException {}
    }
}

View File

@ -24,12 +24,21 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;
import org.elasticsearch.test.ElasticsearchTestCase;
import java.util.ArrayList;
import java.util.List;
/** Base test case for subclasses of MappedFieldType */
public abstract class FieldTypeTestCase extends ElasticsearchTestCase {
/** Create a default constructed fieldtype */
protected abstract MappedFieldType createDefaultFieldType();
MappedFieldType createNamedDefaultFieldType(String name) {
MappedFieldType fieldType = createDefaultFieldType();
fieldType.setNames(new MappedFieldType.Names(name));
return fieldType;
}
/** A dummy null value to use when modifying null value */
protected Object dummyNullValue() {
return "dummyvalue";
@ -79,7 +88,7 @@ public abstract class FieldTypeTestCase extends ElasticsearchTestCase {
}
public void testClone() {
MappedFieldType fieldType = createDefaultFieldType();
MappedFieldType fieldType = createNamedDefaultFieldType("foo");
MappedFieldType clone = fieldType.clone();
assertNotSame(clone, fieldType);
assertEquals(clone.getClass(), fieldType.getClass());
@ -87,7 +96,7 @@ public abstract class FieldTypeTestCase extends ElasticsearchTestCase {
assertEquals(clone, clone.clone()); // transitivity
for (int i = 0; i < numProperties(); ++i) {
fieldType = createDefaultFieldType();
fieldType = createNamedDefaultFieldType("foo");
modifyProperty(fieldType, i);
clone = fieldType.clone();
assertNotSame(clone, fieldType);
@ -96,15 +105,15 @@ public abstract class FieldTypeTestCase extends ElasticsearchTestCase {
}
public void testEquals() {
MappedFieldType ft1 = createDefaultFieldType();
MappedFieldType ft2 = createDefaultFieldType();
MappedFieldType ft1 = createNamedDefaultFieldType("foo");
MappedFieldType ft2 = createNamedDefaultFieldType("foo");
assertEquals(ft1, ft1); // reflexive
assertEquals(ft1, ft2); // symmetric
assertEquals(ft2, ft1);
assertEquals(ft1.hashCode(), ft2.hashCode());
for (int i = 0; i < numProperties(); ++i) {
ft2 = createDefaultFieldType();
ft2 = createNamedDefaultFieldType("foo");
modifyProperty(ft2, i);
assertNotEquals(ft1, ft2);
assertNotEquals(ft1.hashCode(), ft2.hashCode());
@ -113,7 +122,7 @@ public abstract class FieldTypeTestCase extends ElasticsearchTestCase {
public void testFreeze() {
for (int i = 0; i < numProperties(); ++i) {
MappedFieldType fieldType = createDefaultFieldType();
MappedFieldType fieldType = createNamedDefaultFieldType("foo");
fieldType.freeze();
try {
modifyProperty(fieldType, i);
@ -123,4 +132,36 @@ public abstract class FieldTypeTestCase extends ElasticsearchTestCase {
}
}
}
// checkTypeName contract: same field type -> no conflicts; a different class
// claiming the same typeName() -> IllegalStateException ("Type names equal");
// a different typeName() -> a conflict entry, not an exception.
public void testCheckTypeName() {
    final MappedFieldType fieldType = createNamedDefaultFieldType("foo");
    List<String> conflicts = new ArrayList<>();
    fieldType.checkTypeName(fieldType, conflicts);
    assertTrue(conflicts.toString(), conflicts.isEmpty());

    // Same typeName() but a different implementation class: this is a bug in
    // the mapper itself, so it must throw rather than record a conflict.
    MappedFieldType bogus = new MappedFieldType() {
        @Override
        public MappedFieldType clone() {return null;}
        @Override
        public String typeName() { return fieldType.typeName();}
    };
    try {
        fieldType.checkTypeName(bogus, conflicts);
        fail("expected bad types exception");
    } catch (IllegalStateException e) {
        assertTrue(e.getMessage().contains("Type names equal"));
    }
    assertTrue(conflicts.toString(), conflicts.isEmpty());

    // A genuinely different field type: reported as a single merge conflict.
    MappedFieldType other = new MappedFieldType() {
        @Override
        public MappedFieldType clone() {return null;}
        @Override
        public String typeName() { return "othertype";}
    };
    fieldType.checkTypeName(other, conflicts);
    assertFalse(conflicts.isEmpty());
    assertTrue(conflicts.get(0).contains("cannot be changed from type"));
    assertEquals(1, conflicts.size());
}
}

View File

@ -233,11 +233,11 @@ public class CopyToMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapperAfter = parser.parse(mappingAfter);
MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true);
MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapperBefore.merge(docMapperAfter.mapping(), false);
docMapperBefore.merge(docMapperAfter.mapping(), false, false);
fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();

View File

@ -64,12 +64,12 @@ public class TokenCountFieldMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), true);
MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));
mergeResult = stage1.merge(stage2.mapping(), false);
mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));

View File

@ -352,7 +352,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
Map<String, String> config = getConfigurationViaXContent(initialDateFieldMapper);
assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy"));
MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false);
MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false, false);
assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.buildConflicts()), mergeResult.hasConflicts(), is(false));
assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class)));

View File

@ -80,7 +80,7 @@ public class ExternalMapper extends AbstractFieldMapper {
private String mapperName;
public Builder(String name, String generatedValue, String mapperName) {
super(name, Defaults.FIELD_TYPE);
super(name, new ExternalFieldType());
this.builder = this;
this.stringBuilder = stringField(name).store(false);
this.generatedValue = generatedValue;
@ -142,6 +142,25 @@ public class ExternalMapper extends AbstractFieldMapper {
}
}
static class ExternalFieldType extends MappedFieldType {
public ExternalFieldType() {}
protected ExternalFieldType(ExternalFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new ExternalFieldType(this);
}
@Override
public String typeName() {
return "faketype";
}
}
private final String generatedValue;
private final String mapperName;
@ -168,7 +187,7 @@ public class ExternalMapper extends AbstractFieldMapper {
@Override
public MappedFieldType defaultFieldType() {
return Defaults.FIELD_TYPE;
return new ExternalFieldType();
}
@Override

View File

@ -486,7 +486,7 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(2));
// todo better way of checking conflict?
@ -498,7 +498,7 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
.field("validate", true).field("normalize", true).endObject().endObject()
.endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), false);
mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
}
}

View File

@ -20,6 +20,8 @@ package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
public class GeoPointFieldTypeTests extends FieldTypeTestCase {
@Override
@ -36,8 +38,8 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
protected void modifyProperty(MappedFieldType ft, int propNum) {
GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft;
switch (propNum) {
case 0: gft.setGeohashEnabled(new MappedFieldType(), 1, true); break;
case 1: gft.setLatLonEnabled(new MappedFieldType(), new MappedFieldType()); break;
case 0: gft.setGeohashEnabled(new StringFieldMapper.StringFieldType(), 1, true); break;
case 1: gft.setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); break;
case 2: gft.setValidateLon(!gft.validateLon()); break;
case 3: gft.setValidateLat(!gft.validateLat()); break;
case 4: gft.setNormalizeLon(!gft.normalizeLon()); break;

View File

@ -337,7 +337,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
.field("orientation", "cw").endObject().endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
// check correct conflicts
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(4));
@ -365,7 +365,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
.field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), false);
mergeResult = stage1.merge(stage2.mapping(), false, false);
// verify mapping changes, and ensure no failures
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));

View File

@ -99,7 +99,7 @@ public class IndexTypeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper mapperDisabled = parser.parse(mappingWithIndexDisabled);
mapperEnabled.merge(mapperDisabled.mapping(), false);
mapperEnabled.merge(mapperDisabled.mapping(), false, false);
assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
}
@ -115,7 +115,7 @@ public class IndexTypeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper.mapping(), false);
enabledMapper.merge(disabledMapper.mapping(), false, false);
assertThat(enabledMapper.indexMapper().enabled(), is(false));
}

View File

@ -176,11 +176,11 @@ public class FieldNamesFieldMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper mapperEnabled = parser.parse(enabledMapping);
DocumentMapper mapperDisabled = parser.parse(disabledMapping);
mapperEnabled.merge(mapperDisabled.mapping(), false);
mapperEnabled.merge(mapperDisabled.mapping(), false, false);
assertFalse(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
mapperEnabled = parser.parse(enabledMapping);
mapperDisabled.merge(mapperEnabled.mapping(), false);
mapperDisabled.merge(mapperEnabled.mapping(), false, false);
assertTrue(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
}
}

View File

@ -45,12 +45,8 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
*
*/
public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
@Test
public void test1Merge() throws Exception {
String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
@ -65,13 +61,13 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), true);
MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
// since we are simulating, we should not have the age mapping
assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
// now merge, don't simulate
mergeResult = stage1.merge(stage2.mapping(), false);
mergeResult = stage1.merge(stage2.mapping(), false, false);
// there is still merge failures
assertThat(mergeResult.hasConflicts(), equalTo(false));
// but we have the age in
@ -79,7 +75,6 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue());
}
@Test
public void testMergeObjectDynamic() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject().string();
@ -90,12 +85,11 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping);
assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false);
MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
}
@Test
public void testMergeObjectAndNested() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String objectMapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
@ -107,17 +101,16 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().endObject().string();
DocumentMapper nestedMapper = parser.parse(nestedMapping);
MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true);
MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(1));
assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested"));
mergeResult = nestedMapper.merge(objectMapper.mapping(), true);
mergeResult = nestedMapper.merge(objectMapper.mapping(), true, false);
assertThat(mergeResult.buildConflicts().length, equalTo(1));
assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
}
@Test
public void testMergeSearchAnalyzer() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
@ -131,13 +124,12 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper changed = parser.parse(mapping2);
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
MergeResult mergeResult = existing.merge(changed.mapping(), false);
MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
}
@Test
public void testChangeSearchAnalyzerToDefault() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
@ -151,7 +143,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper changed = parser.parse(mapping2);
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
MergeResult mergeResult = existing.merge(changed.mapping(), false);
MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
@ -160,12 +152,12 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
public void testConcurrentMergeTest() throws Throwable {
final MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true);
mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true, false);
final DocumentMapper documentMapper = mapperService.documentMapper("test");
DocumentFieldMappers dfm = documentMapper.mappers();
try {
((FieldNameAnalyzer) dfm.indexAnalyzer()).getWrappedAnalyzer("non_existing_field");
assertNotNull(dfm.indexAnalyzer().tokenStream("non_existing_field", "foo"));
fail();
} catch (IllegalArgumentException e) {
// ok that's expected
@ -186,7 +178,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
Mapping update = doc.dynamicMappingsUpdate();
assert update != null;
lastIntroducedFieldName.set(fieldName);
mapperService.merge("test", new CompressedXContent(update.toString()), false);
mapperService.merge("test", new CompressedXContent(update.toString()), false, false);
}
} catch (Throwable t) {
error.set(t);
@ -207,7 +199,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
continue;
}
dfm = documentMapper.mappers();
((FieldNameAnalyzer) dfm.indexAnalyzer()).getWrappedAnalyzer(fieldName);
assertNotNull(dfm.indexAnalyzer().tokenStream(fieldName, "foo"));
}
} finally {
stopped.set(true);

View File

@ -155,7 +155,7 @@ public class MultiFieldTests extends ElasticsearchSingleNodeTest {
stringField("name").store(true)
.addMultiField(stringField("indexed").index(true).tokenized(true))
.addMultiField(stringField("not_indexed").index(false).store(true))
)).build(indexService.mapperService(), mapperParser);
), indexService.mapperService()).build(indexService.mapperService(), mapperParser);
String builtMapping = builderDocMapper.mappingSource().string();
// System.out.println(builtMapping);

View File

@ -62,10 +62,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json");
DocumentMapper docMapper2 = parser.parse(mapping);
MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true);
MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2.mapping(), false);
docMapper.merge(docMapper2.mapping(), false, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -84,10 +84,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json");
DocumentMapper docMapper3 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper3.mapping(), true);
mergeResult = docMapper.merge(docMapper3.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3.mapping(), false);
docMapper.merge(docMapper3.mapping(), false, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -100,10 +100,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json");
DocumentMapper docMapper4 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper4.mapping(), true);
mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper4.mapping(), false);
docMapper.merge(docMapper4.mapping(), false, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -135,10 +135,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json");
DocumentMapper docMapper2 = parser.parse(mapping);
MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true);
MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2.mapping(), false);
docMapper.merge(docMapper2.mapping(), false, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -157,10 +157,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json");
DocumentMapper docMapper3 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper3.mapping(), true);
mergeResult = docMapper.merge(docMapper3.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3.mapping(), false);
docMapper.merge(docMapper3.mapping(), false, false);
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@ -173,12 +173,12 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json");
DocumentMapper docMapper4 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper4.mapping(), true);
mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different index values"));
assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different store values"));
mergeResult = docMapper.merge(docMapper4.mapping(), false);
mergeResult = docMapper.merge(docMapper4.mapping(), false, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());

View File

@ -46,8 +46,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
DocumentMapper docMapper = doc("test", settings,
rootObject("person")
.add(object("name").add(stringField("first").store(true).index(false)))
).build(indexService.mapperService(), mapperParser);
.add(object("name").add(stringField("first").store(true).index(false))),
indexService.mapperService()).build(indexService.mapperService(), mapperParser);
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
Document doc = docMapper.parse("person", "1", json).rootDoc();
@ -124,8 +124,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
DocumentMapper docMapper = doc("test", settings,
rootObject("person")
.add(object("name").add(stringField("first").store(true).index(false)))
).build(indexService.mapperService(), mapperParser);
.add(object("name").add(stringField("first").store(true).index(false))),
indexService.mapperService()).build(indexService.mapperService(), mapperParser);
BytesReference json = new BytesArray("".getBytes(Charsets.UTF_8));
try {

View File

@ -112,7 +112,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper.mapping(), false);
enabledMapper.merge(disabledMapper.mapping(), false, false);
assertThat(enabledMapper.SizeFieldMapper().enabled(), is(false));
}
}

View File

@ -200,7 +200,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true);
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false);
DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1();
assertThat(mapper.type(), equalTo("my_type"));
@ -213,12 +213,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true);
mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false);
String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type")
.startObject("_source").field("enabled", true).endObject()
.endObject().endObject().string();
mapperService.merge("my_type", new CompressedXContent(mapping), true);
mapperService.merge("my_type", new CompressedXContent(mapping), true, false);
DocumentMapper mapper = mapperService.documentMapper("my_type");
assertThat(mapper.type(), equalTo("my_type"));
@ -228,7 +228,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper = parser.parse(docMapper.mappingSource().string());
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true);
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false);
List<String> expectedConflicts = new ArrayList<>(Arrays.asList(conflicts));
for (String conflict : mergeResult.buildConflicts()) {

View File

@ -502,7 +502,7 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
.endObject().endObject().endObject().endObject().string();
MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false);
MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false);
assertFalse(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts());
doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
@ -517,7 +517,7 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
.endObject().endObject().endObject().endObject().string();
mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true);
mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
assertTrue(mergeResult.hasConflicts());
assertEquals(1, mergeResult.buildConflicts().length);
assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms"));

View File

@ -154,7 +154,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper.mapping(), false);
enabledMapper.merge(disabledMapper.mapping(), false, false);
assertThat(enabledMapper.timestampFieldMapper().enabled(), is(false));
}
@ -518,7 +518,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
.endObject().endObject().string();
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false);
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false, false);
assertThat(mergeResult.buildConflicts().length, equalTo(0));
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER));
assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array"));
@ -586,7 +586,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true);
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
List<String> expectedConflicts = new ArrayList<>(Arrays.asList(
"mapper [_timestamp] has different index values",
"mapper [_timestamp] has different store values",
@ -625,7 +625,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true);
MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
List<String> expectedConflicts = new ArrayList<>();
expectedConflicts.add("mapper [_timestamp] has different index values");
expectedConflicts.add("mapper [_timestamp] has different tokenize values");
@ -685,7 +685,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper = parser.parse(docMapper.mappingSource().string());
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true);
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false);
assertThat(mergeResult.buildConflicts().length, equalTo(conflict == null ? 0 : 1));
if (conflict != null) {
assertThat(mergeResult.buildConflicts()[0], containsString(conflict));

View File

@ -119,7 +119,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl);
DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl);
MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false);
MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true));
@ -145,7 +145,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(updatedMapping);
MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true);
MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@ -159,7 +159,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper initialMapper = parser.parse(mappingWithTtl);
DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled);
MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true);
MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@ -197,7 +197,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean());
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false);
assertFalse(mergeResult.hasConflicts());
}
@ -205,7 +205,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean());
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false);
assertFalse(mergeResult.hasConflicts());
}
@ -214,7 +214,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@ -227,7 +227,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@ -241,7 +241,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl);
CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d");
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true);
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@ -253,7 +253,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled();
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true);
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@ -265,7 +265,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true);
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@ -276,7 +276,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
mappingWithoutTtl = getMappingWithTtlDisabled("6d");
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false);
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@ -286,7 +286,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
// check if switching simulate flag off works if nothing was applied in the beginning
indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false);
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();

View File

@ -82,7 +82,7 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
// simulate like in MetaDataMappingService#putMapping
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false);
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false);
// assure we have no conflicts
assertThat(mergeResult.buildConflicts().length, equalTo(0));
// make sure mappings applied
@ -106,7 +106,7 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
// simulate like in MetaDataMappingService#putMapping
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true);
MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false);
// assure we have conflicts
assertThat(mergeResult.buildConflicts().length, equalTo(1));
// make sure simulate flag actually worked - no mappings applied

View File

@ -57,7 +57,7 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
MapperService mapperService = indexService.mapperService();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedXContent(mapping), true);
mapperService.merge("person", new CompressedXContent(mapping), true, false);
ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();

View File

@ -59,7 +59,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ElasticsearchS
MapperService mapperService = indexService.mapperService();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedXContent(mapping), true);
mapperService.merge("person", new CompressedXContent(mapping), true, false);
ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();

View File

@ -92,7 +92,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
MapperService mapperService = indexService.mapperService();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedXContent(mapping), true);
mapperService.merge("person", new CompressedXContent(mapping), true, false);
ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();

View File

@ -64,8 +64,8 @@ public abstract class AbstractChildTests extends ElasticsearchSingleNodeTest {
MapperService mapperService = indexService.mapperService();
// Parent/child parsers require that the parent and child type to be presented in mapping
// Sometimes we want a nested object field in the parent type that triggers nonNestedDocsFilter to be used
mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true);
mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true);
mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true, false);
mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true, false);
return createSearchContext(indexService);
}

View File

@ -185,16 +185,13 @@ public class RecoveryPercolatorTests extends ElasticsearchIntegrationTest {
logger.info("--> Add dummy docs");
client().prepareIndex("test", "type1", "1").setSource("field1", 0).get();
client().prepareIndex("test", "type2", "1").setSource("field1", "0").get();
client().prepareIndex("test", "type2", "1").setSource("field1", 1).get();
logger.info("--> register a queries");
for (int i = 1; i <= 100; i++) {
client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i))
.setSource(jsonBuilder().startObject()
.field("query", rangeQuery("field1").from(0).to(i))
// The type must be set now, because two fields with the same name exist in different types.
// Setting the type to `type1`, makes sure that the range query gets parsed to a Lucene NumericRangeQuery.
.field("type", "type1")
.endObject())
.get();
}

View File

@ -117,7 +117,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeTest {
IndexSearcher searcher = new IndexSearcher(directoryReader);
IndexService indexService = createIndex("test");
indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true);
indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true, false);
SearchContext searchContext = createSearchContext(indexService);
AggregationContext context = new AggregationContext(searchContext);

Some files were not shown because too many files have changed in this diff Show More