Mappings: Restrict fields with the same name in different types to have the same core settings
We are currently very lax about letting data types conflict for the same field name across document types. This change makes the underlying map in MapperService a one-to-one map of field name to field type, and throws an exception when a new type is not compatible. To still allow changing settings that are legal to change on a field that exists in multiple types, a new parameter, update_all_types, is added to the index creation and put mapping APIs. It defaults to false, and the exception messages suggest using this parameter when a modification to an otherwise modifiable setting is blocked by this restriction. There are also a couple of changes that base fields for new types, for dynamic mappings and root mappers, on existing settings. For dynamic mappings this matters when the dynamic defaults have been changed; for root mappers it is mostly for backcompat, since pre-2.0 root mappers could have their field type changed.

fixes #8871
parent 1df2d3015e
commit adcc0683b8
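
A usage sketch (not part of the commit's diff): with the Java API additions below, a caller could opt in roughly like this. The index name "test", the type name "type_two", and the boost change are hypothetical, and the exact setSource overload may differ; setUpdateAllTypes is the builder method this commit adds to PutMappingRequestBuilder, and CreateIndexRequest.updateAllTypes is the equivalent for index creation.

    // Hypothetical example: change an updateable setting (boost) on a field
    // that also exists in other types of the index. Without update_all_types
    // the merge now fails, and the exception message suggests this parameter.
    client.admin().indices().preparePutMapping("test")
            .setType("type_two")
            .setSource("{\"properties\":{\"title\":{\"type\":\"string\",\"boost\":2.0}}}")
            .setUpdateAllTypes(true)  // new in this commit; defaults to false
            .get();

At the REST layer the commit message exposes the same flag as the update_all_types parameter on the create index and put mapping APIs.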
@@ -41,6 +41,7 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
     private final TransportMessage originalMessage;
     private final String cause;
     private final String index;
+    private final boolean updateAllTypes;

     private IndexMetaData.State state = IndexMetaData.State.OPEN;
@@ -55,10 +56,11 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
     private final Set<ClusterBlock> blocks = Sets.newHashSet();

-    CreateIndexClusterStateUpdateRequest(TransportMessage originalMessage, String cause, String index) {
+    CreateIndexClusterStateUpdateRequest(TransportMessage originalMessage, String cause, String index, boolean updateAllTypes) {
         this.originalMessage = originalMessage;
         this.cause = cause;
         this.index = index;
+        this.updateAllTypes = updateAllTypes;
     }

     public CreateIndexClusterStateUpdateRequest settings(Settings settings) {
@@ -126,4 +128,8 @@ public class CreateIndexClusterStateUpdateRequest extends ClusterStateUpdateRequ
     public Set<ClusterBlock> blocks() {
         return blocks;
     }
+
+    public boolean updateAllTypes() {
+        return updateAllTypes;
+    }
 }

@@ -72,6 +72,8 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
     private final Map<String, IndexMetaData.Custom> customs = newHashMap();

+    private boolean updateAllTypes = false;
+
     CreateIndexRequest() {
     }
@@ -433,6 +435,17 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
         return this.customs;
     }

+    /** True if all fields that span multiple types should be updated, false otherwise */
+    public boolean updateAllTypes() {
+        return updateAllTypes;
+    }
+
+    /** See {@link #updateAllTypes()} */
+    public CreateIndexRequest updateAllTypes(boolean updateAllTypes) {
+        this.updateAllTypes = updateAllTypes;
+        return this;
+    }
+
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
@@ -454,6 +467,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
         for (int i = 0; i < aliasesSize; i++) {
             aliases.add(Alias.read(in));
         }
+        updateAllTypes = in.readBoolean();
     }

     @Override
@@ -477,5 +491,6 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
         for (Alias alias : aliases) {
             alias.writeTo(out);
         }
+        out.writeBoolean(updateAllTypes);
     }
 }

@@ -243,4 +243,10 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<Create
         request.source(source);
         return this;
     }
+
+    /** True if all fields that span multiple types should be updated, false otherwise */
+    public CreateIndexRequestBuilder setUpdateAllTypes(boolean updateAllTypes) {
+        request.updateAllTypes(updateAllTypes);
+        return this;
+    }
 }

@@ -71,7 +71,7 @@ public class TransportCreateIndexAction extends TransportMasterNodeAction<Create
             cause = "api";
         }

-        final CreateIndexClusterStateUpdateRequest updateRequest = new CreateIndexClusterStateUpdateRequest(request, cause, request.index())
+        final CreateIndexClusterStateUpdateRequest updateRequest = new CreateIndexClusterStateUpdateRequest(request, cause, request.index(), request.updateAllTypes())
                 .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
                 .settings(request.settings()).mappings(request.mappings())
                 .aliases(request.aliases()).customs(request.customs());

@@ -30,6 +30,8 @@ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpda
     private String source;

+    private boolean updateAllTypes = false;
+
     PutMappingClusterStateUpdateRequest() {

     }
@@ -51,4 +53,13 @@ public class PutMappingClusterStateUpdateRequest extends IndicesClusterStateUpda
         this.source = source;
         return this;
     }
+
+    public boolean updateAllTypes() {
+        return updateAllTypes;
+    }
+
+    public PutMappingClusterStateUpdateRequest updateAllTypes(boolean updateAllTypes) {
+        this.updateAllTypes = updateAllTypes;
+        return this;
+    }
 }

@@ -63,6 +63,8 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
     private String source;

+    private boolean updateAllTypes = false;
+
     PutMappingRequest() {
     }
@@ -236,6 +238,17 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         return this;
     }

+    /** True if all fields that span multiple types should be updated, false otherwise */
+    public boolean updateAllTypes() {
+        return updateAllTypes;
+    }
+
+    /** See {@link #updateAllTypes()} */
+    public PutMappingRequest updateAllTypes(boolean updateAllTypes) {
+        this.updateAllTypes = updateAllTypes;
+        return this;
+    }
+
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
@@ -243,6 +256,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         indicesOptions = IndicesOptions.readIndicesOptions(in);
         type = in.readOptionalString();
         source = in.readString();
+        updateAllTypes = in.readBoolean();
         readTimeout(in);
     }
@@ -253,6 +267,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         indicesOptions.writeIndicesOptions(out);
         out.writeOptionalString(type);
         out.writeString(source);
+        out.writeBoolean(updateAllTypes);
         writeTimeout(out);
     }
 }

@@ -91,4 +91,10 @@ public class PutMappingRequestBuilder extends AcknowledgedRequestBuilder<PutMapp
         return this;
     }
+
+    /** True if all fields that span multiple types should be updated, false otherwise */
+    public PutMappingRequestBuilder setUpdateAllTypes(boolean updateAllTypes) {
+        request.updateAllTypes(updateAllTypes);
+        return this;
+    }

 }

@@ -69,6 +69,7 @@ public class TransportPutMappingAction extends TransportMasterNodeAction<PutMapp
                 PutMappingClusterStateUpdateRequest updateRequest = new PutMappingClusterStateUpdateRequest()
                         .ackTimeout(request.timeout()).masterNodeTimeout(request.masterNodeTimeout())
                         .indices(concreteIndices).type(request.type())
+                        .updateAllTypes(request.updateAllTypes())
                         .source(request.source());

                 metaDataMappingService.putMapping(updateRequest, new ActionListener<ClusterStateUpdateResponse>() {

@@ -345,7 +345,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
         // first, add the default mapping
         if (mappings.containsKey(MapperService.DEFAULT_MAPPING)) {
             try {
-                mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false);
+                mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(XContentFactory.jsonBuilder().map(mappings.get(MapperService.DEFAULT_MAPPING)).string()), false, request.updateAllTypes());
             } catch (Exception e) {
                 removalReason = "failed on parsing default mapping on index creation";
                 throw new MapperParsingException("mapping [" + MapperService.DEFAULT_MAPPING + "]", e);
@@ -357,7 +357,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
             }
             try {
                 // apply the default here, its the first time we parse it
-                mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true);
+                mapperService.merge(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string()), true, request.updateAllTypes());
             } catch (Exception e) {
                 removalReason = "failed on parsing mappings on index creation";
                 throw new MapperParsingException("mapping [" + entry.getKey() + "]", e);

@@ -101,11 +101,11 @@ public class MetaDataIndexAliasesService extends AbstractComponent {
             try {
                 indexService = indicesService.createIndex(indexMetaData.index(), indexMetaData.settings(), clusterService.localNode().id());
                 if (indexMetaData.mappings().containsKey(MapperService.DEFAULT_MAPPING)) {
-                    indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false);
+                    indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false, false);
                 }
                 for (ObjectCursor<MappingMetaData> cursor : indexMetaData.mappings().values()) {
                     MappingMetaData mappingMetaData = cursor.value;
-                    indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), false);
+                    indexService.mapperService().merge(mappingMetaData.type(), mappingMetaData.source(), false, false);
                 }
             } catch (Exception e) {
                 logger.warn("[{}] failed to temporary create in order to apply alias action", e, indexMetaData.index());

@@ -193,7 +193,7 @@ public class MetaDataMappingService extends AbstractComponent {
                 // only add the current relevant mapping (if exists)
                 if (indexMetaData.mappings().containsKey(type)) {
                     // don't apply the default mapping, it has been applied when the mapping was created
-                    indexService.mapperService().merge(type, indexMetaData.mappings().get(type).source(), false);
+                    indexService.mapperService().merge(type, indexMetaData.mappings().get(type).source(), false, true);
                 }
             }
         }
@@ -264,7 +264,7 @@ public class MetaDataMappingService extends AbstractComponent {
                     continue;
                 }

-                DocumentMapper updatedMapper = indexService.mapperService().merge(type, mappingSource, false);
+                DocumentMapper updatedMapper = indexService.mapperService().merge(type, mappingSource, false, true);
                 processedRefreshes.add(type);

                 // if we end up with the same mapping as the original once, ignore
@@ -361,11 +361,11 @@ public class MetaDataMappingService extends AbstractComponent {
                     indicesToClose.add(indexMetaData.index());
                     // make sure to add custom default mapping if exists
                     if (indexMetaData.mappings().containsKey(MapperService.DEFAULT_MAPPING)) {
-                        indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false);
+                        indexService.mapperService().merge(MapperService.DEFAULT_MAPPING, indexMetaData.mappings().get(MapperService.DEFAULT_MAPPING).source(), false, request.updateAllTypes());
                     }
                     // only add the current relevant mapping (if exists)
                     if (indexMetaData.mappings().containsKey(request.type())) {
-                        indexService.mapperService().merge(request.type(), indexMetaData.mappings().get(request.type()).source(), false);
+                        indexService.mapperService().merge(request.type(), indexMetaData.mappings().get(request.type()).source(), false, request.updateAllTypes());
                     }
                 }

@@ -383,7 +383,7 @@ public class MetaDataMappingService extends AbstractComponent {
                 newMapper = indexService.mapperService().parse(request.type(), new CompressedXContent(request.source()), existingMapper == null);
                 if (existingMapper != null) {
                     // first, simulate
-                    MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true);
+                    MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), true, request.updateAllTypes());
                     // if we have conflicts, throw an exception
                     if (mergeResult.hasConflicts()) {
                         throw new MergeMappingException(mergeResult.buildConflicts());
@@ -438,7 +438,7 @@ public class MetaDataMappingService extends AbstractComponent {
                 if (existingMappers.containsKey(entry.getKey())) {
                     existingSource = existingMappers.get(entry.getKey()).mappingSource();
                 }
-                DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false);
+                DocumentMapper mergedMapper = indexService.mapperService().merge(newMapper.type(), newMapper.mappingSource(), false, request.updateAllTypes());
                 CompressedXContent updatedSource = mergedMapper.mappingSource();

                 if (existingSource != null) {

@@ -75,9 +75,20 @@ public abstract class FieldsVisitor extends StoredFieldVisitor {

     public void postProcess(DocumentMapper documentMapper) {
         for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
-            FieldMapper fieldMapper = documentMapper.mappers().indexName(entry.getKey()).mapper();
+            String indexName = entry.getKey();
+            FieldMapper fieldMapper = documentMapper.mappers().getMapper(indexName);
             if (fieldMapper == null) {
-                continue;
+                // it's possible index name doesn't match field name (legacy feature)
+                for (FieldMapper mapper : documentMapper.mappers()) {
+                    if (mapper.fieldType().names().indexName().equals(indexName)) {
+                        fieldMapper = mapper;
+                        break;
+                    }
+                }
+                if (fieldMapper == null) {
+                    // no index name or full name found, so skip
+                    continue;
+                }
             }
             List<Object> fieldValues = entry.getValue();
             for (int i = 0; i < fieldValues.size(); i++) {

@@ -22,32 +22,38 @@ package org.elasticsearch.index.mapper;
 import com.google.common.base.Function;
 import com.google.common.collect.Collections2;
 import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 import org.apache.lucene.analysis.Analyzer;
+import org.elasticsearch.common.collect.CopyOnWriteHashMap;
+import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.analysis.FieldNameAnalyzer;

 import java.util.Collection;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.Set;

 /**
  *
  */
 public final class DocumentFieldMappers implements Iterable<FieldMapper> {

-    private final FieldMappersLookup fieldMappers;
+    /** Full field name to mapper */
+    private final CopyOnWriteHashMap<String, FieldMapper> fieldMappers;

     private final FieldNameAnalyzer indexAnalyzer;
     private final FieldNameAnalyzer searchAnalyzer;
     private final FieldNameAnalyzer searchQuoteAnalyzer;

     public DocumentFieldMappers(AnalysisService analysisService) {
-        this(new FieldMappersLookup(), new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
-                new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
-                new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
+        this(new CopyOnWriteHashMap<String, FieldMapper>(),
+             new FieldNameAnalyzer(analysisService.defaultIndexAnalyzer()),
+             new FieldNameAnalyzer(analysisService.defaultSearchAnalyzer()),
+             new FieldNameAnalyzer(analysisService.defaultSearchQuoteAnalyzer()));
     }

-    private DocumentFieldMappers(FieldMappersLookup fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {
+    private DocumentFieldMappers(CopyOnWriteHashMap<String, FieldMapper> fieldMappers, FieldNameAnalyzer indexAnalyzer, FieldNameAnalyzer searchAnalyzer, FieldNameAnalyzer searchQuoteAnalyzer) {
         this.fieldMappers = fieldMappers;
         this.indexAnalyzer = indexAnalyzer;
         this.searchAnalyzer = searchAnalyzer;
@@ -55,7 +61,10 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
     }

     public DocumentFieldMappers copyAndAllAll(Collection<FieldMapper> newMappers) {
-        FieldMappersLookup fieldMappers = this.fieldMappers.copyAndAddAll(newMappers);
+        CopyOnWriteHashMap<String, FieldMapper> map = this.fieldMappers;
+        for (FieldMapper fieldMapper : newMappers) {
+            map = map.copyAndPut(fieldMapper.fieldType().names().fullName(), fieldMapper);
+        }
         FieldNameAnalyzer indexAnalyzer = this.indexAnalyzer.copyAndAddAll(Collections2.transform(newMappers, new Function<FieldMapper, Map.Entry<String, Analyzer>>() {
             @Override
             public Map.Entry<String, Analyzer> apply(FieldMapper input) {
@@ -74,22 +83,7 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
                 return Maps.immutableEntry(input.fieldType().names().indexName(), (Analyzer)input.fieldType().searchQuoteAnalyzer());
             }
         }));
-        return new DocumentFieldMappers(fieldMappers, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);
-    }
-
-    /**
-     * Looks up a field by its index name.
-     *
-     * Overriding index name for a field is no longer possibly, and only supported for backcompat.
-     * This function first attempts to lookup the field by full name, and only when that fails,
-     * does a full scan of all field mappers, collecting those with this index name.
-     *
-     * This will be removed in 3.0, once backcompat for overriding index name is removed.
-     * @deprecated Use {@link #getMapper(String)}
-     */
-    @Deprecated
-    public FieldMappers indexName(String indexName) {
-        return fieldMappers.indexName(indexName);
+        return new DocumentFieldMappers(map, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer);
     }

     /** Returns the mapper for the given field */
@@ -97,23 +91,29 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
         return fieldMappers.get(field);
     }

-    Collection<String> simpleMatchToIndexNames(String pattern) {
-        return fieldMappers.simpleMatchToIndexNames(pattern);
-    }
-
     public Collection<String> simpleMatchToFullName(String pattern) {
-        return fieldMappers.simpleMatchToFullName(pattern);
-    }
-
-    /**
-     * Tries to find first based on fullName, then by indexName.
-     */
-    FieldMappers smartName(String name) {
-        return fieldMappers.smartName(name);
+        Set<String> fields = Sets.newHashSet();
+        for (FieldMapper fieldMapper : this) {
+            if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
+                fields.add(fieldMapper.fieldType().names().fullName());
+            } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
+                fields.add(fieldMapper.fieldType().names().fullName());
+            }
+        }
+        return fields;
     }

     public FieldMapper smartNameFieldMapper(String name) {
-        return fieldMappers.smartNameFieldMapper(name);
+        FieldMapper fieldMapper = getMapper(name);
+        if (fieldMapper != null) {
+            return fieldMapper;
+        }
+        for (FieldMapper otherFieldMapper : this) {
+            if (otherFieldMapper.fieldType().names().indexName().equals(name)) {
+                return otherFieldMapper;
+            }
+        }
+        return null;
     }

     /**
@@ -145,6 +145,6 @@ public final class DocumentFieldMappers implements Iterable<FieldMapper> {
     }

     public Iterator<FieldMapper> iterator() {
-        return fieldMappers.iterator();
+        return fieldMappers.values().iterator();
     }
 }

@@ -23,7 +23,6 @@ import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;

 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -32,7 +31,6 @@ import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchGenerationException;
 import org.elasticsearch.Version;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.compress.CompressedXContent;
@@ -43,6 +41,7 @@ import org.elasticsearch.common.util.concurrent.ReleasableLock;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.fielddata.FieldDataType;
 import org.elasticsearch.index.mapper.Mapping.SourceTransform;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
@@ -96,28 +95,32 @@ public class DocumentMapper implements ToXContent {

         private final Mapper.BuilderContext builderContext;

-        public Builder(String index, Settings indexSettings, RootObjectMapper.Builder builder) {
+        public Builder(String index, Settings indexSettings, RootObjectMapper.Builder builder, MapperService mapperService) {
             this.index = index;
             this.indexSettings = indexSettings;
             this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
             this.rootObjectMapper = builder.build(builderContext);
+
+            // TODO: find a cleaner way to handle existing root mappings and using their field type as the default.
+            // the vast majority of these root mappers only need the existing type for backwards compatibility, since
+            // the pre 2.0 field type settings could be modified
+
             // UID first so it will be the first stored field to load (so will benefit from "fields: []" early termination
-            this.rootMappers.put(UidFieldMapper.class, new UidFieldMapper(indexSettings));
-            this.rootMappers.put(IdFieldMapper.class, new IdFieldMapper(indexSettings));
-            this.rootMappers.put(RoutingFieldMapper.class, new RoutingFieldMapper(indexSettings));
+            this.rootMappers.put(UidFieldMapper.class, new UidFieldMapper(indexSettings, mapperService.fullName(UidFieldMapper.NAME)));
+            this.rootMappers.put(IdFieldMapper.class, new IdFieldMapper(indexSettings, mapperService.fullName(IdFieldMapper.NAME)));
+            this.rootMappers.put(RoutingFieldMapper.class, new RoutingFieldMapper(indexSettings, mapperService.fullName(RoutingFieldMapper.NAME)));
             // add default mappers, order is important (for example analyzer should come before the rest to set context.analyzer)
-            this.rootMappers.put(SizeFieldMapper.class, new SizeFieldMapper(indexSettings));
-            this.rootMappers.put(IndexFieldMapper.class, new IndexFieldMapper(indexSettings));
+            this.rootMappers.put(SizeFieldMapper.class, new SizeFieldMapper(indexSettings, mapperService.fullName(SizeFieldMapper.NAME)));
+            this.rootMappers.put(IndexFieldMapper.class, new IndexFieldMapper(indexSettings, mapperService.fullName(IndexFieldMapper.NAME)));
             this.rootMappers.put(SourceFieldMapper.class, new SourceFieldMapper(indexSettings));
-            this.rootMappers.put(TypeFieldMapper.class, new TypeFieldMapper(indexSettings));
-            this.rootMappers.put(AllFieldMapper.class, new AllFieldMapper(indexSettings));
-            this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper(indexSettings));
+            this.rootMappers.put(TypeFieldMapper.class, new TypeFieldMapper(indexSettings, mapperService.fullName(TypeFieldMapper.NAME)));
+            this.rootMappers.put(AllFieldMapper.class, new AllFieldMapper(indexSettings, mapperService.fullName(AllFieldMapper.NAME)));
+            this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper(indexSettings, mapperService.fullName(TimestampFieldMapper.NAME)));
             this.rootMappers.put(TTLFieldMapper.class, new TTLFieldMapper(indexSettings));
             this.rootMappers.put(VersionFieldMapper.class, new VersionFieldMapper(indexSettings));
-            this.rootMappers.put(ParentFieldMapper.class, new ParentFieldMapper(indexSettings));
+            this.rootMappers.put(ParentFieldMapper.class, new ParentFieldMapper(indexSettings, mapperService.fullName(ParentFieldMapper.NAME)));
             // _field_names last so that it can see all other fields
-            this.rootMappers.put(FieldNamesFieldMapper.class, new FieldNamesFieldMapper(indexSettings));
+            this.rootMappers.put(FieldNamesFieldMapper.class, new FieldNamesFieldMapper(indexSettings, mapperService.fullName(FieldNamesFieldMapper.NAME)));
         }

         public Builder meta(ImmutableMap<String, Object> meta) {
@@ -393,87 +396,40 @@ public class DocumentMapper implements ToXContent {
         return DocumentParser.transformSourceAsMap(mapping, sourceAsMap);
     }

-    private void addFieldMappers(Collection<FieldMapper> fieldMappers) {
-        assert mappingLock.isWriteLockedByCurrentThread();
-        this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
-        mapperService.addFieldMappers(fieldMappers);
-    }
-
     public boolean isParent(String type) {
         return mapperService.getParentTypes().contains(type);
     }

-    private void addObjectMappers(Collection<ObjectMapper> objectMappers) {
+    private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
         assert mappingLock.isWriteLockedByCurrentThread();
-        MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
-        for (ObjectMapper objectMapper : objectMappers) {
-            builder.put(objectMapper.fullPath(), objectMapper);
-            if (objectMapper.nested().isNested()) {
-                hasNestedObjects = true;
-            }
+        // first ensure we don't have any incompatible new fields
+        mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, true);
+
+        // update mappers for this document type
+        MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
+        for (ObjectMapper objectMapper : objectMappers) {
+            builder.put(objectMapper.fullPath(), objectMapper);
+            if (objectMapper.nested().isNested()) {
+                hasNestedObjects = true;
+            }
         }
         this.objectMappers = builder.immutableMap();
-        mapperService.addObjectMappers(objectMappers);
-    }
-
-    private MergeResult newMergeContext(boolean simulate) {
-        return new MergeResult(simulate) {
-
-            final List<String> conflicts = new ArrayList<>();
-            final List<FieldMapper> newFieldMappers = new ArrayList<>();
-            final List<ObjectMapper> newObjectMappers = new ArrayList<>();
-
-            @Override
-            public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
-                assert simulate() == false;
-                newFieldMappers.addAll(fieldMappers);
-            }
-
-            @Override
-            public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
-                assert simulate() == false;
-                newObjectMappers.addAll(objectMappers);
-            }
-
-            @Override
-            public Collection<FieldMapper> getNewFieldMappers() {
-                return newFieldMappers;
-            }
-
-            @Override
-            public Collection<ObjectMapper> getNewObjectMappers() {
-                return newObjectMappers;
-            }
-
-            @Override
-            public void addConflict(String mergeFailure) {
-                conflicts.add(mergeFailure);
-            }
-
-            @Override
-            public boolean hasConflicts() {
-                return conflicts.isEmpty() == false;
-            }
-
-            @Override
-            public String[] buildConflicts() {
-                return conflicts.toArray(Strings.EMPTY_ARRAY);
-            }
-        };
-    }
-
-    public MergeResult merge(Mapping mapping, boolean simulate) {
-        try (ReleasableLock lock = mappingWriteLock.acquire()) {
-            final MergeResult mergeResult = newMergeContext(simulate);
-            this.mapping.merge(mapping, mergeResult);
-            if (simulate == false) {
-                addFieldMappers(mergeResult.getNewFieldMappers());
-                addObjectMappers(mergeResult.getNewObjectMappers());
-                refreshSource();
-            }
-            return mergeResult;
+        this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
+
+        // finally update for the entire index
+        mapperService.addMappers(objectMappers, fieldMappers);
+    }
+
+    public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
+        try (ReleasableLock lock = mappingWriteLock.acquire()) {
+            final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
+            this.mapping.merge(mapping, mergeResult);
+            if (simulate == false) {
+                addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers());
+                refreshSource();
+            }
+            return mergeResult;
         }
     }

     private void refreshSource() throws ElasticsearchGenerationException {

@@ -167,7 +167,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {
     }

     public Mapper.TypeParser.ParserContext parserContext() {
-        return new Mapper.TypeParser.ParserContext(analysisService, similarityLookupService, typeParsers, indexVersionCreated);
+        return new Mapper.TypeParser.ParserContext(analysisService, similarityLookupService, mapperService, typeParsers, indexVersionCreated);
     }

     public DocumentMapper parse(String source) throws MapperParsingException {
@@ -227,7 +227,7 @@ public class DocumentMapperParser extends AbstractIndexComponent {

         Mapper.TypeParser.ParserContext parserContext = parserContext();
         // parse RootObjectMapper
-        DocumentMapper.Builder docBuilder = doc(index.name(), indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext));
+        DocumentMapper.Builder docBuilder = doc(index.name(), indexSettings, (RootObjectMapper.Builder) rootObjectTypeParser.parse(type, mapping, parserContext), mapperService);
         Iterator<Map.Entry<String, Object>> iterator = mapping.entrySet().iterator();
         // parse DocumentMapper
         while(iterator.hasNext()) {

@@ -33,6 +33,13 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
+import org.elasticsearch.index.mapper.core.NumberFieldMapper;
+import org.elasticsearch.index.mapper.core.StringFieldMapper;
+import org.elasticsearch.index.mapper.core.AbstractFieldMapper.Builder;
+import org.elasticsearch.index.mapper.core.DateFieldMapper.DateFieldType;
+import org.elasticsearch.index.mapper.core.LongFieldMapper.LongFieldType;
+import org.elasticsearch.index.mapper.core.StringFieldMapper.StringFieldType;
 import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
 import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
@@ -438,6 +445,172 @@ class DocumentParser implements Closeable {
         }
     }

+    private static Mapper.Builder<?,?> createBuilderFromFieldType(final ParseContext context, MappedFieldType fieldType, String currentFieldName) {
+        Mapper.Builder builder = null;
+        if (fieldType instanceof StringFieldType) {
+            builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
+            if (builder == null) {
+                builder = MapperBuilders.stringField(currentFieldName);
+            }
+        } else if (fieldType instanceof DateFieldType) {
+            builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
+            if (builder == null) {
+                builder = MapperBuilders.dateField(currentFieldName);
+            }
+        } else if (fieldType.numericType() != null) {
+            switch (fieldType.numericType()) {
+                case LONG:
+                    builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
+                    if (builder == null) {
+                        builder = MapperBuilders.longField(currentFieldName);
+                    }
+                    break;
+                case DOUBLE:
+                    builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
+                    if (builder == null) {
+                        builder = MapperBuilders.doubleField(currentFieldName);
+                    }
+                    break;
+                case INT:
+                    builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
+                    if (builder == null) {
+                        builder = MapperBuilders.integerField(currentFieldName);
+                    }
+                    break;
+                case FLOAT:
+                    builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
+                    if (builder == null) {
+                        builder = MapperBuilders.floatField(currentFieldName);
+                    }
+                    break;
+                default:
+                    throw new AssertionError("Unexpected numeric type " + fieldType.numericType());
+            }
+        }
+        return builder;
+    }
+
+    private static Mapper.Builder<?,?> createBuilderFromDynamicValue(final ParseContext context, XContentParser.Token token, String currentFieldName) throws IOException {
+        if (token == XContentParser.Token.VALUE_STRING) {
+            // do a quick test to see if its fits a dynamic template, if so, use it.
+            // we need to do it here so we can handle things like attachment templates, where calling
+            // text (to see if its a date) causes the binary value to be cleared
+            {
+                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
+                if (builder != null) {
+                    return builder;
+                }
+            }
+
+            if (context.root().dateDetection()) {
+                String text = context.parser().text();
+                // a safe check since "1" gets parsed as well
+                if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) {
+                    for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
+                        try {
+                            dateTimeFormatter.parser().parseMillis(text);
+                            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
+                            if (builder == null) {
+                                builder = MapperBuilders.dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
+                            }
+                            return builder;
+                        } catch (Exception e) {
+                            // failure to parse this, continue
+                        }
+                    }
+                }
+            }
+            if (context.root().numericDetection()) {
+                String text = context.parser().text();
+                try {
+                    Long.parseLong(text);
+                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
+                    if (builder == null) {
+                        builder = MapperBuilders.longField(currentFieldName);
+                    }
+                    return builder;
+                } catch (Exception e) {
+                    // not a long number
+                }
+                try {
+                    Double.parseDouble(text);
+                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
+                    if (builder == null) {
+                        builder = MapperBuilders.doubleField(currentFieldName);
+                    }
+                    return builder;
+                } catch (Exception e) {
+                    // not a double number
+                }
+            }
+            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
+            if (builder == null) {
+                builder = MapperBuilders.stringField(currentFieldName);
+            }
+            return builder;
+        } else if (token == XContentParser.Token.VALUE_NUMBER) {
+            XContentParser.NumberType numberType = context.parser().numberType();
+            if (numberType == XContentParser.NumberType.INT) {
+                if (context.parser().estimatedNumberType()) {
+                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
+                    if (builder == null) {
+                        builder = MapperBuilders.longField(currentFieldName);
+                    }
+                    return builder;
+                } else {
+                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
+                    if (builder == null) {
+                        builder = MapperBuilders.integerField(currentFieldName);
+                    }
+                    return builder;
+                }
+            } else if (numberType == XContentParser.NumberType.LONG) {
+                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
+                if (builder == null) {
+                    builder = MapperBuilders.longField(currentFieldName);
+                }
+                return builder;
+            } else if (numberType == XContentParser.NumberType.FLOAT) {
+                if (context.parser().estimatedNumberType()) {
+                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
+                    if (builder == null) {
+                        builder = MapperBuilders.doubleField(currentFieldName);
+                    }
+                    return builder;
+                } else {
+                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
+                    if (builder == null) {
+                        builder = MapperBuilders.floatField(currentFieldName);
+                    }
+                    return builder;
+                }
+            } else if (numberType == XContentParser.NumberType.DOUBLE) {
+                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
+                if (builder == null) {
+                    builder = MapperBuilders.doubleField(currentFieldName);
+                }
+                return builder;
+            }
+        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
+            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
+            if (builder == null) {
+                builder = MapperBuilders.booleanField(currentFieldName);
+            }
+            return builder;
+        } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
+            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
+            if (builder == null) {
+                builder = MapperBuilders.binaryField(currentFieldName);
+            }
+            return builder;
+        } else {
+            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
+            if (builder != null) {
+                return builder;
+            }
+        }
+        // TODO how do we identify dynamically that its a binary value?
+        throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
+    }
+
     private static ObjectMapper parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
         ObjectMapper.Dynamic dynamic = parentMapper.dynamic();
         if (dynamic == null) {
@@ -449,140 +622,33 @@ class DocumentParser implements Closeable {
         if (dynamic == ObjectMapper.Dynamic.FALSE) {
             return null;
         }
-        Mapper mapper = null;
-        Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
-        if (token == XContentParser.Token.VALUE_STRING) {
-            boolean resolved = false;
-
-            // do a quick test to see if its fits a dynamic template, if so, use it.
-            // we need to do it here so we can handle things like attachment templates, where calling
-            // text (to see if its a date) causes the binary value to be cleared
-            if (!resolved) {
-                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
-                if (builder != null) {
-                    mapper = builder.build(builderContext);
-                    resolved = true;
-                }
-            }
-
-            if (!resolved && context.root().dateDetection()) {
-                String text = context.parser().text();
-                // a safe check since "1" gets parsed as well
-                if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) {
-                    for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
-                        try {
-                            dateTimeFormatter.parser().parseMillis(text);
-                            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
-                            if (builder == null) {
-                                builder = MapperBuilders.dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
-                            }
-                            mapper = builder.build(builderContext);
-                            resolved = true;
-                            break;
-                        } catch (Exception e) {
-                            // failure to parse this, continue
-                        }
-                    }
-                }
-            }
-            if (!resolved && context.root().numericDetection()) {
-                String text = context.parser().text();
-                try {
-                    Long.parseLong(text);
-                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
-                    if (builder == null) {
-                        builder = MapperBuilders.longField(currentFieldName);
-                    }
-                    mapper = builder.build(builderContext);
-                    resolved = true;
-                } catch (Exception e) {
-                    // not a long number
-                }
-                if (!resolved) {
-                    try {
-                        Double.parseDouble(text);
-                        Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
-                        if (builder == null) {
-                            builder = MapperBuilders.doubleField(currentFieldName);
-                        }
-                        mapper = builder.build(builderContext);
-                        resolved = true;
-                    } catch (Exception e) {
-                        // not a double number
-                    }
-                }
-            }
-            if (!resolved) {
-                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
-                if (builder == null) {
-                    builder = MapperBuilders.stringField(currentFieldName);
-                }
-                mapper = builder.build(builderContext);
-            }
-        } else if (token == XContentParser.Token.VALUE_NUMBER) {
-            XContentParser.NumberType numberType = context.parser().numberType();
-            if (numberType == XContentParser.NumberType.INT) {
-                if (context.parser().estimatedNumberType()) {
-                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
-                    if (builder == null) {
-                        builder = MapperBuilders.longField(currentFieldName);
-                    }
-                    mapper = builder.build(builderContext);
-                } else {
-                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
-                    if (builder == null) {
-                        builder = MapperBuilders.integerField(currentFieldName);
-                    }
-                    mapper = builder.build(builderContext);
-                }
-            } else if (numberType == XContentParser.NumberType.LONG) {
-                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
-                if (builder == null) {
-                    builder = MapperBuilders.longField(currentFieldName);
-                }
-                mapper = builder.build(builderContext);
-            } else if (numberType == XContentParser.NumberType.FLOAT) {
-                if (context.parser().estimatedNumberType()) {
-                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
-                    if (builder == null) {
-                        builder = MapperBuilders.doubleField(currentFieldName);
-                    }
-                    mapper = builder.build(builderContext);
-                } else {
-                    Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
-                    if (builder == null) {
-                        builder = MapperBuilders.floatField(currentFieldName);
-                    }
-                    mapper = builder.build(builderContext);
-                }
-            } else if (numberType == XContentParser.NumberType.DOUBLE) {
-                Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
-                if (builder == null) {
-                    builder = MapperBuilders.doubleField(currentFieldName);
-                }
-                mapper = builder.build(builderContext);
-            }
-        } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
-            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
-            if (builder == null) {
-                builder = MapperBuilders.booleanField(currentFieldName);
-            }
-            mapper = builder.build(builderContext);
-        } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
-            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
-            if (builder == null) {
-                builder = MapperBuilders.binaryField(currentFieldName);
-            }
-            mapper = builder.build(builderContext);
-        } else {
-            Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
-            if (builder != null) {
-                mapper = builder.build(builderContext);
-            } else {
-                // TODO how do we identify dynamically that its a binary value?
-                throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
-            }
-        }
+        final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
+        final MappedFieldType existingFieldType = context.mapperService().fullName(context.path().fullPathAsText(currentFieldName));
+        Mapper.Builder builder = null;
+        if (existingFieldType != null) {
+            // create a builder of the same type
+            builder = createBuilderFromFieldType(context, existingFieldType, currentFieldName);
+        }
+        if (builder == null) {
+            builder = createBuilderFromDynamicValue(context, token, currentFieldName);
+        }
+        if (existingFieldType != null) {
+            // best-effort to not introduce a conflict
+            if (builder instanceof StringFieldMapper.Builder) {
+                StringFieldMapper.Builder stringBuilder = (StringFieldMapper.Builder) builder;
+                stringBuilder.store(existingFieldType.stored());
+                stringBuilder.indexOptions(existingFieldType.indexOptions());
+                stringBuilder.omitNorms(existingFieldType.omitNorms());
+                stringBuilder.docValues(existingFieldType.hasDocValues());
+            } else if (builder instanceof NumberFieldMapper.Builder) {
+                NumberFieldMapper.Builder<?,?> numberBuilder = (NumberFieldMapper.Builder<?, ?>) builder;
+                numberBuilder.store(existingFieldType.stored());
+                numberBuilder.indexOptions(existingFieldType.indexOptions());
+                numberBuilder.omitNorms(existingFieldType.omitNorms());
+                numberBuilder.docValues(existingFieldType.hasDocValues());
+            }
+        }
+        Mapper mapper = builder.build(builderContext);

         mapper = parseAndMergeUpdate(mapper, context);

@@ -621,10 +687,9 @@ class DocumentParser implements Closeable {

     /** Creates an copy of the current field with given field name and boost */
     private static void parseCopy(String field, ParseContext context) throws IOException {
-        // TODO: this should not be indexName...
-        FieldMappers mappers = context.docMapper().mappers().indexName(field);
-        if (mappers != null && !mappers.isEmpty()) {
-            mappers.mapper().parse(context);
+        FieldMapper fieldMapper = context.docMapper().mappers().getMapper(field);
+        if (fieldMapper != null) {
+            fieldMapper.parse(context);
         } else {
             // The path of the dest field might be completely different from the current one so we need to reset it
             context = context.overridePath(new ContentPath(0));

@@ -32,6 +32,15 @@ public interface FieldMapper extends Mapper {

     MappedFieldType fieldType();

+    /** Returns a reference to the MappedFieldType for this mapper. */
+    MappedFieldTypeReference fieldTypeReference();
+
+    /**
+     * Updates the reference to this field's MappedFieldType.
+     * Implementations should assert equality of the underlying field type
+     */
+    void setFieldTypeReference(MappedFieldTypeReference ref);
+
     /**
      * List of fields where this field should be copied to
      */

@ -1,193 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* A class that holds a map of field mappers from name, index name, and full name.
|
||||
*/
|
||||
class FieldMappersLookup implements Iterable<FieldMapper> {
|
||||
|
||||
/** Full field name to mappers */
|
||||
private final CopyOnWriteHashMap<String, FieldMappers> mappers;
|
||||
|
||||
/** Create a new empty instance. */
|
||||
public FieldMappersLookup() {
|
||||
mappers = new CopyOnWriteHashMap<>();
|
||||
}
|
||||
|
||||
private FieldMappersLookup(CopyOnWriteHashMap<String, FieldMappers> map) {
|
||||
mappers = map;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a new instance that contains the union of this instance and the provided mappers.
|
||||
*/
|
||||
public FieldMappersLookup copyAndAddAll(Collection<FieldMapper> newMappers) {
|
||||
CopyOnWriteHashMap<String, FieldMappers> map = this.mappers;
|
||||
|
||||
for (FieldMapper mapper : newMappers) {
|
||||
String key = mapper.fieldType().names().fullName();
|
||||
FieldMappers mappers = map.get(key);
|
||||
|
||||
if (mappers == null) {
|
||||
mappers = new FieldMappers(mapper);
|
||||
} else {
|
||||
mappers = mappers.concat(mapper);
|
||||
}
|
||||
map = map.copyAndPut(key, mappers);
|
||||
}
|
||||
return new FieldMappersLookup(map);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the field mappers based on the mapper index name.
|
||||
* NOTE: this only exists for backcompat support and if the index name
|
||||
* does not match it's field name, this is a linear time operation
|
||||
* @deprecated Use {@link #get(String)}
|
||||
*/
|
||||
@Deprecated
|
||||
public FieldMappers indexName(String indexName) {
|
||||
FieldMappers fieldMappers = fullName(indexName);
|
||||
if (fieldMappers != null) {
|
||||
if (fieldMappers.mapper().fieldType().names().indexName().equals(indexName)) {
|
||||
return fieldMappers;
|
||||
}
|
||||
}
|
||||
fieldMappers = new FieldMappers();
|
||||
for (FieldMapper mapper : this) {
|
||||
if (mapper.fieldType().names().indexName().equals(indexName)) {
|
||||
                fieldMappers = fieldMappers.concat(mapper);
            }
        }
        if (fieldMappers.isEmpty()) {
            return null;
        }
        return fieldMappers;
    }

    /**
     * Returns the field mappers based on the mapper full name.
     */
    public FieldMappers fullName(String fullName) {
        return mappers.get(fullName);
    }

    /** Returns the mapper for the given field */
    public FieldMapper get(String field) {
        FieldMappers fieldMappers = mappers.get(field);
        if (fieldMappers == null) {
            return null;
        }
        if (fieldMappers.mappers().size() != 1) {
            throw new IllegalStateException("Mapper for field [" + field + "] should be unique");
        }
        return fieldMappers.mapper();
    }

    /**
     * Returns a list of the index names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToIndexNames(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (FieldMapper fieldMapper : this) {
            if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
                fields.add(fieldMapper.fieldType().names().indexName());
            } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
                fields.add(fieldMapper.fieldType().names().indexName());
            }
        }
        return fields;
    }

    /**
     * Returns a list of the full names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToFullName(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (FieldMapper fieldMapper : this) {
            if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().fullName())) {
                fields.add(fieldMapper.fieldType().names().fullName());
            } else if (Regex.simpleMatch(pattern, fieldMapper.fieldType().names().indexName())) {
                fields.add(fieldMapper.fieldType().names().fullName());
            }
        }
        return fields;
    }

    /**
     * Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}.
     */
    @Nullable
    FieldMappers smartName(String name) {
        FieldMappers fieldMappers = fullName(name);
        if (fieldMappers != null) {
            return fieldMappers;
        }
        return indexName(name);
    }

    /**
     * Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}
     * and return the first mapper for it (see {@link org.elasticsearch.index.mapper.FieldMappers#mapper()}).
     */
    @Nullable
    public FieldMapper smartNameFieldMapper(String name) {
        FieldMappers fieldMappers = smartName(name);
        if (fieldMappers == null) {
            return null;
        }
        return fieldMappers.mapper();
    }

    public Iterator<FieldMapper> iterator() {
        final Iterator<FieldMappers> fieldsItr = mappers.values().iterator();
        if (fieldsItr.hasNext() == false) {
            return Collections.emptyIterator();
        }
        return new Iterator<FieldMapper>() {
            Iterator<FieldMapper> fieldValuesItr = fieldsItr.next().iterator();

            @Override
            public boolean hasNext() {
                return fieldsItr.hasNext() || fieldValuesItr.hasNext();
            }

            @Override
            public FieldMapper next() {
                if (fieldValuesItr.hasNext() == false && fieldsItr.hasNext()) {
                    fieldValuesItr = fieldsItr.next().iterator();
                }
                return fieldValuesItr.next();
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException("cannot remove field mapper from lookup");
            }
        };
    }
}
@@ -0,0 +1,180 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import com.google.common.base.Function;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;
import org.elasticsearch.common.collect.CopyOnWriteHashMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.mapper.object.ObjectMapper;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * An immutable container for looking up {@link MappedFieldType}s by their name.
 */
class FieldTypeLookup implements Iterable<MappedFieldType> {
    private static final Function<MappedFieldTypeReference, MappedFieldType> UNWRAPPER = new Function<MappedFieldTypeReference, MappedFieldType>() {
        @Override
        public MappedFieldType apply(MappedFieldTypeReference ref) {
            return ref.get();
        }
    };

    /** Full field name to field type */
    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> fullNameToFieldType;

    /** Index field name to field type */
    private final CopyOnWriteHashMap<String, MappedFieldTypeReference> indexNameToFieldType;

    /** Create a new empty instance. */
    public FieldTypeLookup() {
        fullNameToFieldType = new CopyOnWriteHashMap<>();
        indexNameToFieldType = new CopyOnWriteHashMap<>();
    }

    private FieldTypeLookup(CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName, CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName) {
        fullNameToFieldType = fullName;
        indexNameToFieldType = indexName;
    }

    /**
     * Return a new instance that contains the union of this instance and the field types
     * from the provided fields. If a field already exists, the field type will be updated
     * to use the new mapper's field type.
     */
    public FieldTypeLookup copyAndAddAll(Collection<FieldMapper> newFieldMappers) {
        CopyOnWriteHashMap<String, MappedFieldTypeReference> fullName = this.fullNameToFieldType;
        CopyOnWriteHashMap<String, MappedFieldTypeReference> indexName = this.indexNameToFieldType;

        for (FieldMapper fieldMapper : newFieldMappers) {
            MappedFieldType fieldType = fieldMapper.fieldType();
            MappedFieldTypeReference fullNameRef = fullName.get(fieldType.names().fullName());
            MappedFieldTypeReference indexNameRef = indexName.get(fieldType.names().indexName());
            if (fullNameRef == null && indexNameRef == null) {
                // new field, just use the ref from this field mapper
                fullName = fullName.copyAndPut(fieldType.names().fullName(), fieldMapper.fieldTypeReference());
                indexName = indexName.copyAndPut(fieldType.names().indexName(), fieldMapper.fieldTypeReference());
            } else if (fullNameRef == null) {
                // this index name already exists, so copy over the reference
                fullName = fullName.copyAndPut(fieldType.names().fullName(), indexNameRef);
                indexNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
                fieldMapper.setFieldTypeReference(indexNameRef);
            } else if (indexNameRef == null) {
                // this full name already exists, so copy over the reference
                indexName = indexName.copyAndPut(fieldType.names().indexName(), fullNameRef);
                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
                fieldMapper.setFieldTypeReference(fullNameRef);
            } else if (fullNameRef == indexNameRef) {
                // the field already exists, so replace the reference in this mapper with the pre-existing one
                fullNameRef.set(fieldMapper.fieldType()); // field type is updated, since modifiable settings may have changed
                fieldMapper.setFieldTypeReference(fullNameRef);
            } else {
                // this new field bridges between two existing field names (a full and index name), which we cannot support
                throw new IllegalStateException("insane mappings found. field " + fieldType.names().fullName() + " maps across types to field " + fieldType.names().indexName());
            }
        }
        return new FieldTypeLookup(fullName, indexName);
    }

    /**
     * Checks if the given mapper's field types are compatible with existing field types.
     * If any are not compatible, an IllegalArgumentException is thrown.
     * If updateAllTypes is true, only basic compatibility is checked.
     */
    public void checkCompatibility(Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
        for (FieldMapper fieldMapper : newFieldMappers) {
            MappedFieldTypeReference ref = fullNameToFieldType.get(fieldMapper.fieldType().names().fullName());
            if (ref != null) {
                boolean strict = ref.getRefCount() > 1 && updateAllTypes == false;
                List<String> conflicts = new ArrayList<>();
                ref.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
                if (conflicts.isEmpty() == false) {
                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with existing mapping in other types" + conflicts.toString());
                }
            }

            // field type for the index name must be compatible too
            MappedFieldTypeReference indexNameRef = indexNameToFieldType.get(fieldMapper.fieldType().names().indexName());
            if (indexNameRef != null) {
                boolean strict = indexNameRef.getRefCount() > 1 && updateAllTypes == false;
                List<String> conflicts = new ArrayList<>();
                indexNameRef.get().checkCompatibility(fieldMapper.fieldType(), conflicts, strict);
                if (conflicts.isEmpty() == false) {
                    throw new IllegalArgumentException("Mapper for [" + fieldMapper.fieldType().names().fullName() + "] conflicts with mapping with the same index name in other types" + conflicts.toString());
                }
            }
        }
    }

    /** Returns the field type for the given field name */
    public MappedFieldType get(String field) {
        MappedFieldTypeReference ref = fullNameToFieldType.get(field);
        if (ref == null) return null;
        return ref.get();
    }

    /** Returns the field type for the given index name */
    public MappedFieldType getByIndexName(String field) {
        MappedFieldTypeReference ref = indexNameToFieldType.get(field);
        if (ref == null) return null;
        return ref.get();
    }

    /**
     * Returns a list of the index names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToIndexNames(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (MappedFieldType fieldType : this) {
            if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
                fields.add(fieldType.names().indexName());
            } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
                fields.add(fieldType.names().indexName());
            }
        }
        return fields;
    }

    /**
     * Returns a list of the full names of a simple match regex like pattern against full name and index name.
     */
    public Collection<String> simpleMatchToFullName(String pattern) {
        Set<String> fields = Sets.newHashSet();
        for (MappedFieldType fieldType : this) {
            if (Regex.simpleMatch(pattern, fieldType.names().fullName())) {
                fields.add(fieldType.names().fullName());
            } else if (Regex.simpleMatch(pattern, fieldType.names().indexName())) {
                fields.add(fieldType.names().fullName());
            }
        }
        return fields;
    }

    public Iterator<MappedFieldType> iterator() {
        return Iterators.transform(fullNameToFieldType.values().iterator(), UNWRAPPER);
    }
}
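The union logic in copyAndAddAll hinges on both maps sharing a single MappedFieldTypeReference per field. Below is a minimal, self-contained sketch of that invariant — the Ref class, map names, and field-type strings are illustrative stand-ins, not the Elasticsearch API:

import java.util.HashMap;
import java.util.Map;

public class SharedRefSketch {
    // Stand-in for MappedFieldTypeReference: a mutable cell both maps point at.
    static final class Ref { String fieldType; Ref(String t) { fieldType = t; } }

    public static void main(String[] args) {
        Map<String, Ref> byFullName = new HashMap<>();
        Map<String, Ref> byIndexName = new HashMap<>();

        // New field: one shared reference registered under both names.
        Ref ref = new Ref("string, analyzed");
        byFullName.put("user.name", ref);
        byIndexName.put("name", ref);

        // A later type updates a modifiable setting through the index name...
        byIndexName.get("name").fieldType = "string, analyzed, boost=2";

        // ...and the update is visible through the full name, because the two
        // maps share the reference instead of copying the field type.
        System.out.println(byFullName.get("user.name").fieldType);
    }
}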
@@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper;

import com.google.common.base.Strings;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;

@@ -38,7 +37,6 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;

@@ -228,8 +226,10 @@ public class MappedFieldType extends FieldType {

    /**
     * Checks for any conflicts between this field type and other.
     * If strict is true, all properties must be equal.
     * Otherwise, only properties which must never change in an index are checked.
     */
    public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
    public void checkCompatibility(MappedFieldType other, List<String> conflicts, boolean strict) {
        boolean indexed = indexOptions() != IndexOptions.NONE;
        boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
        // TODO: should be validating if index options go "up" (but "down" is ok)

@@ -277,10 +277,30 @@ public class MappedFieldType extends FieldType {
        if (!names().equals(other.names())) {
            conflicts.add("mapper [" + names().fullName() + "] has different index_name");
        }

        if (Objects.equals(similarity(), other.similarity()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different similarity");
        }

        if (strict) {
            if (omitNorms() != other.omitNorms()) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types.");
            }
            if (boost() != other.boost()) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types.");
            }
            if (normsLoading() != other.normsLoading()) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms].loading across all types.");
            }
            if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types.");
            }
            if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types.");
            }
            if (Objects.equals(nullValue(), other.nullValue()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types.");
            }
        }
    }

    public boolean isNumeric() {
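The new strict parameter splits the checks into two tiers: invariants that may never change within an index are always verified, while the update_all_types-gated settings are verified only when the field spans multiple types. A hedged, self-contained sketch of that two-tier pattern — the record and its fields are illustrative, not the real MappedFieldType properties:

import java.util.ArrayList;
import java.util.List;

public class CompatSketch {
    // 'indexed' models a never-changeable property; 'boost' models an
    // update_all_types-gated one.
    record FieldType(String name, boolean indexed, float boost) {
        void checkCompatibility(FieldType other, List<String> conflicts, boolean strict) {
            if (indexed != other.indexed) { // may never change within an index
                conflicts.add("mapper [" + name + "] has different index values");
            }
            if (strict && boost != other.boost) { // changeable, but not silently across types
                conflicts.add("mapper [" + name + "] is used by multiple types. "
                        + "Set update_all_types to true to update [boost] across all types.");
            }
        }
    }

    public static void main(String[] args) {
        FieldType a = new FieldType("title", true, 1.0f);
        FieldType b = new FieldType("title", true, 2.0f);
        List<String> lenient = new ArrayList<>();
        List<String> strict = new ArrayList<>();
        a.checkCompatibility(b, lenient, false); // ok: boost alone is modifiable
        a.checkCompatibility(b, strict, true);   // conflict: the field spans types
        System.out.println(lenient.size() + " vs " + strict.size()); // prints "0 vs 1"
    }
}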
@@ -0,0 +1,50 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.mapper;

/**
 * A container for a {@link MappedFieldType} which can be updated and is reference counted.
 */
public class MappedFieldTypeReference {
    private MappedFieldType fieldType; // the current field type this reference points to
    private int refCount;

    public MappedFieldTypeReference(MappedFieldType fieldType) {
        fieldType.freeze(); // ensure frozen
        this.fieldType = fieldType;
        this.refCount = 1;
    }

    public MappedFieldType get() {
        return fieldType;
    }

    public void set(MappedFieldType fieldType) {
        fieldType.freeze(); // ensure frozen
        this.fieldType = fieldType;
    }

    public int getRefCount() {
        return refCount;
    }

    public void incRefCount() {
        ++refCount;
    }
}
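A short sketch of how this class is meant to be used: the second type that maps a field adopts the first type's reference and bumps the count, and refCount > 1 is precisely the condition under which the strict checks above kick in. FieldType and FieldTypeRef below are self-contained stand-ins for the real classes:

public class RefCountSketch {
    static final class FieldType { } // stub for MappedFieldType

    static final class FieldTypeRef { // mirrors MappedFieldTypeReference above
        private FieldType fieldType;
        private int refCount = 1;
        FieldTypeRef(FieldType ft) { this.fieldType = ft; }
        FieldType get() { return fieldType; }
        int getRefCount() { return refCount; }
        void incRefCount() { ++refCount; }
    }

    public static void main(String[] args) {
        FieldTypeRef ref = new FieldTypeRef(new FieldType()); // first type maps the field
        ref.incRefCount(); // a second type adopts the same reference

        // refCount > 1 without update_all_types is exactly the strict condition
        boolean updateAllTypes = false;
        boolean strict = ref.getRefCount() > 1 && updateAllTypes == false;
        System.out.println("strict = " + strict); // true
    }
}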
@@ -86,14 +86,18 @@ public interface Mapper extends ToXContent, Iterable<Mapper> {

        private final SimilarityLookupService similarityLookupService;

        private final MapperService mapperService;

        private final ImmutableMap<String, TypeParser> typeParsers;

        private final Version indexVersionCreated;

        public ParserContext(AnalysisService analysisService, SimilarityLookupService similarityLookupService,
                             MapperService mapperService,
                             ImmutableMap<String, TypeParser> typeParsers, Version indexVersionCreated) {
            this.analysisService = analysisService;
            this.similarityLookupService = similarityLookupService;
            this.mapperService = mapperService;
            this.typeParsers = typeParsers;
            this.indexVersionCreated = indexVersionCreated;
        }

@@ -106,6 +110,10 @@ public interface Mapper extends ToXContent, Iterable<Mapper> {
            return similarityLookupService;
        }

        public MapperService mapperService() {
            return mapperService;
        }

        public TypeParser typeParser(String type) {
            return typeParsers.get(Strings.toUnderscoreCase(type));
        }
@@ -37,48 +37,48 @@ public final class MapperBuilders {

    }

    public static DocumentMapper.Builder doc(String index, Settings settings, RootObjectMapper.Builder objectBuilder) {
        return new DocumentMapper.Builder(index, settings, objectBuilder);
    public static DocumentMapper.Builder doc(String index, Settings settings, RootObjectMapper.Builder objectBuilder, MapperService mapperService) {
        return new DocumentMapper.Builder(index, settings, objectBuilder, mapperService);
    }

    public static SourceFieldMapper.Builder source() {
        return new SourceFieldMapper.Builder();
    }

    public static IdFieldMapper.Builder id() {
        return new IdFieldMapper.Builder();
    public static IdFieldMapper.Builder id(MappedFieldType existing) {
        return new IdFieldMapper.Builder(existing);
    }

    public static RoutingFieldMapper.Builder routing() {
        return new RoutingFieldMapper.Builder();
    public static RoutingFieldMapper.Builder routing(MappedFieldType existing) {
        return new RoutingFieldMapper.Builder(existing);
    }

    public static UidFieldMapper.Builder uid() {
        return new UidFieldMapper.Builder();
    public static UidFieldMapper.Builder uid(MappedFieldType existing) {
        return new UidFieldMapper.Builder(existing);
    }

    public static SizeFieldMapper.Builder size() {
        return new SizeFieldMapper.Builder();
    public static SizeFieldMapper.Builder size(MappedFieldType existing) {
        return new SizeFieldMapper.Builder(existing);
    }

    public static VersionFieldMapper.Builder version() {
        return new VersionFieldMapper.Builder();
    }

    public static TypeFieldMapper.Builder type() {
        return new TypeFieldMapper.Builder();
    public static TypeFieldMapper.Builder type(MappedFieldType existing) {
        return new TypeFieldMapper.Builder(existing);
    }

    public static FieldNamesFieldMapper.Builder fieldNames() {
        return new FieldNamesFieldMapper.Builder();
    }

    public static IndexFieldMapper.Builder index() {
        return new IndexFieldMapper.Builder();
    public static IndexFieldMapper.Builder index(MappedFieldType existing) {
        return new IndexFieldMapper.Builder(existing);
    }

    public static TimestampFieldMapper.Builder timestamp() {
        return new TimestampFieldMapper.Builder();
    public static TimestampFieldMapper.Builder timestamp(MappedFieldType existing) {
        return new TimestampFieldMapper.Builder(existing);
    }

    public static TTLFieldMapper.Builder ttl() {

@@ -89,8 +89,8 @@ public final class MapperBuilders {
        return new ParentFieldMapper.Builder();
    }

    public static AllFieldMapper.Builder all() {
        return new AllFieldMapper.Builder();
    public static AllFieldMapper.Builder all(MappedFieldType existing) {
        return new AllFieldMapper.Builder(existing);
    }

    public static RootObjectMapper.Builder rootObject(String name) {
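All of these signature changes implement one seeding pattern: prefer the field type another type has already registered over the static default, so dynamic defaults and pre-2.0 root-mapper changes carry over to new types. A tiny self-contained sketch of that pattern — FieldType and DEFAULT_FIELD_TYPE are illustrative stand-ins:

public class SeedSketch {
    static final class FieldType {
        final String source;
        FieldType(String source) { this.source = source; }
    }

    static final FieldType DEFAULT_FIELD_TYPE = new FieldType("defaults");

    // The pattern used by id(), uid(), all(), timestamp(), ... above.
    static FieldType seed(FieldType existing) {
        return existing == null ? DEFAULT_FIELD_TYPE : existing;
    }

    public static void main(String[] args) {
        System.out.println(seed(null).source);                        // "defaults"
        System.out.println(seed(new FieldType("other type")).source); // "other type"
    }
}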
@@ -21,7 +21,10 @@ package org.elasticsearch.index.mapper;

import com.carrotsearch.hppc.ObjectHashSet;
import com.google.common.base.Predicate;
import com.google.common.collect.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;

@@ -36,7 +39,6 @@ import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;

@@ -102,8 +104,8 @@ public class MapperService extends AbstractIndexComponent {
    final ReentrantReadWriteLock mappingLock = new ReentrantReadWriteLock();
    private final ReleasableLock mappingWriteLock = new ReleasableLock(mappingLock.writeLock());

    private volatile FieldMappersLookup fieldMappers;
    private volatile ImmutableOpenMap<String, ObjectMappers> fullPathObjectMappers = ImmutableOpenMap.of();
    private volatile FieldTypeLookup fieldTypes;
    private volatile ImmutableOpenMap<String, ObjectMapper> fullPathObjectMappers = ImmutableOpenMap.of();
    private boolean hasNested = false; // updated dynamically to true when a nested object is added

    private final DocumentMapperParser documentParser;

@@ -124,7 +126,7 @@ public class MapperService extends AbstractIndexComponent {
        super(index, indexSettings);
        this.analysisService = analysisService;
        this.fieldDataService = fieldDataService;
        this.fieldMappers = new FieldMappersLookup();
        this.fieldTypes = new FieldTypeLookup();
        this.documentParser = new DocumentMapperParser(index, indexSettings, this, analysisService, similarityLookupService, scriptService);
        this.searchAnalyzer = new SmartIndexNameSearchAnalyzer(analysisService.defaultSearchAnalyzer());
        this.searchQuoteAnalyzer = new SmartIndexNameSearchQuoteAnalyzer(analysisService.defaultSearchQuoteAnalyzer());

@@ -214,7 +216,7 @@ public class MapperService extends AbstractIndexComponent {
        typeListeners.remove(listener);
    }

    public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault) {
    public DocumentMapper merge(String type, CompressedXContent mappingSource, boolean applyDefault, boolean updateAllTypes) {
        if (DEFAULT_MAPPING.equals(type)) {
            // verify we can parse it
            DocumentMapper mapper = documentParser.parseCompressed(type, mappingSource);

@@ -230,13 +232,13 @@ public class MapperService extends AbstractIndexComponent {
            }
            return mapper;
        } else {
            return merge(parse(type, mappingSource, applyDefault));
            return merge(parse(type, mappingSource, applyDefault), updateAllTypes);
        }
    }

    // never expose this to the outside world, we need to reparse the doc mapper so we get fresh
    // instances of field mappers to properly remove existing doc mapper
    private DocumentMapper merge(DocumentMapper mapper) {
    private DocumentMapper merge(DocumentMapper mapper, boolean updateAllTypes) {
        try (ReleasableLock lock = mappingWriteLock.acquire()) {
            if (mapper.type().length() == 0) {
                throw new InvalidTypeNameException("mapping type name is empty");

@@ -262,7 +264,7 @@ public class MapperService extends AbstractIndexComponent {
            DocumentMapper oldMapper = mappers.get(mapper.type());

            if (oldMapper != null) {
                MergeResult result = oldMapper.merge(mapper.mapping(), false);
                MergeResult result = oldMapper.merge(mapper.mapping(), false, updateAllTypes);
                if (result.hasConflicts()) {
                    // TODO: What should we do???
                    if (logger.isDebugEnabled()) {

@@ -270,19 +272,18 @@ public class MapperService extends AbstractIndexComponent {
                    }
                }
                fieldDataService.onMappingUpdate();
                assert assertSerialization(oldMapper);
                return oldMapper;
            } else {
                List<ObjectMapper> newObjectMappers = new ArrayList<>();
                List<FieldMapper> newFieldMappers = new ArrayList<>();
                for (RootMapper rootMapper : mapper.mapping().rootMappers) {
                    if (rootMapper instanceof FieldMapper) {
                        newFieldMappers.add((FieldMapper)rootMapper);
                        newFieldMappers.add((FieldMapper) rootMapper);
                    }
                }
                MapperUtils.collect(mapper.mapping().root, newObjectMappers, newFieldMappers);
                addFieldMappers(newFieldMappers);
                addObjectMappers(newObjectMappers);
                checkNewMappersCompatibility(newObjectMappers, newFieldMappers, updateAllTypes);
                addMappers(newObjectMappers, newFieldMappers);

                for (DocumentTypeListener typeListener : typeListeners) {
                    typeListener.beforeCreate(mapper);

@@ -313,28 +314,33 @@ public class MapperService extends AbstractIndexComponent {
        return true;
    }

    protected void addObjectMappers(Collection<ObjectMapper> objectMappers) {
    protected void checkNewMappersCompatibility(Collection<ObjectMapper> newObjectMappers, Collection<FieldMapper> newFieldMappers, boolean updateAllTypes) {
        assert mappingLock.isWriteLockedByCurrentThread();
        ImmutableOpenMap.Builder<String, ObjectMappers> fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers);
        for (ObjectMapper objectMapper : objectMappers) {
            ObjectMappers mappers = fullPathObjectMappers.get(objectMapper.fullPath());
            if (mappers == null) {
                mappers = new ObjectMappers(objectMapper);
            } else {
                mappers = mappers.concat(objectMapper);
        for (ObjectMapper newObjectMapper : newObjectMappers) {
            ObjectMapper existingObjectMapper = fullPathObjectMappers.get(newObjectMapper.fullPath());
            if (existingObjectMapper != null) {
                MergeResult result = new MergeResult(true, updateAllTypes);
                existingObjectMapper.merge(newObjectMapper, result);
                if (result.hasConflicts()) {
                    throw new IllegalArgumentException("Mapper for [" + newObjectMapper.fullPath() + "] conflicts with existing mapping in other types" +
                            Arrays.toString(result.buildConflicts()));
                }
            }
            fullPathObjectMappers.put(objectMapper.fullPath(), mappers);
            // update the hasNested flag
        }
        fieldTypes.checkCompatibility(newFieldMappers, updateAllTypes);
    }

    protected void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers) {
        assert mappingLock.isWriteLockedByCurrentThread();
        ImmutableOpenMap.Builder<String, ObjectMapper> fullPathObjectMappers = ImmutableOpenMap.builder(this.fullPathObjectMappers);
        for (ObjectMapper objectMapper : objectMappers) {
            fullPathObjectMappers.put(objectMapper.fullPath(), objectMapper);
            if (objectMapper.nested().isNested()) {
                hasNested = true;
            }
        }
        this.fullPathObjectMappers = fullPathObjectMappers.build();
    }

    protected void addFieldMappers(Collection<FieldMapper> fieldMappers) {
        assert mappingLock.isWriteLockedByCurrentThread();
        this.fieldMappers = this.fieldMappers.copyAndAddAll(fieldMappers);
        this.fieldTypes = this.fieldTypes.copyAndAddAll(fieldMappers);
    }

    public DocumentMapper parse(String mappingType, CompressedXContent mappingSource, boolean applyDefault) throws MapperParsingException {

@@ -479,11 +485,7 @@ public class MapperService extends AbstractIndexComponent {
     * If multiple types have fields with the same index name, the first is returned.
     */
    public MappedFieldType indexName(String indexName) {
        FieldMappers mappers = fieldMappers.indexName(indexName);
        if (mappers == null) {
            return null;
        }
        return mappers.mapper().fieldType();
        return fieldTypes.getByIndexName(indexName);
    }

    /**

@@ -492,11 +494,7 @@ public class MapperService extends AbstractIndexComponent {
     * If multiple types have fields with the same full name, the first is returned.
     */
    public MappedFieldType fullName(String fullName) {
        FieldMappers mappers = fieldMappers.fullName(fullName);
        if (mappers == null) {
            return null;
        }
        return mappers.mapper().fieldType();
        return fieldTypes.get(fullName);
    }

    /**

@@ -504,52 +502,21 @@ public class MapperService extends AbstractIndexComponent {
     * then the fields will be returned with a type prefix.
     */
    public Collection<String> simpleMatchToIndexNames(String pattern) {
        return simpleMatchToIndexNames(pattern, null);
    }
    /**
     * Returns all the fields that match the given pattern, with an optional narrowing
     * based on a list of types.
     */
    public Collection<String> simpleMatchToIndexNames(String pattern, @Nullable String[] types) {
        if (Regex.isSimpleMatchPattern(pattern) == false) {
            // no wildcards
            return ImmutableList.of(pattern);
        }

        if (MetaData.isAllTypes(types)) {
            return fieldMappers.simpleMatchToIndexNames(pattern);
        }

        List<String> fields = Lists.newArrayList();
        for (String type : types) {
            DocumentMapper possibleDocMapper = mappers.get(type);
            if (possibleDocMapper != null) {
                for (String indexName : possibleDocMapper.mappers().simpleMatchToIndexNames(pattern)) {
                    fields.add(indexName);
                }
            }
        }
        return fields;
        return fieldTypes.simpleMatchToIndexNames(pattern);
    }

    // TODO: remove this since the underlying index names are now the same across all types
    public Collection<String> simpleMatchToIndexNames(String pattern, @Nullable String[] types) {
        return simpleMatchToIndexNames(pattern);
    }

    // TODO: remove types param, since the object mapper must be the same across all types
    public ObjectMapper getObjectMapper(String name, @Nullable String[] types) {
        if (types == null || types.length == 0 || types.length == 1 && types[0].equals("_all")) {
            ObjectMappers mappers = fullPathObjectMappers.get(name);
            if (mappers != null) {
                return mappers.mapper();
            }
            return null;
        }
        for (String type : types) {
            DocumentMapper possibleDocMapper = mappers.get(type);
            if (possibleDocMapper != null) {
                ObjectMapper mapper = possibleDocMapper.objectMappers().get(name);
                if (mapper != null) {
                    return mapper;
                }
            }
        }
        return null;
        return fullPathObjectMappers.get(name);
    }

    public MappedFieldType smartNameFieldType(String smartName) {

@@ -560,22 +527,9 @@ public class MapperService extends AbstractIndexComponent {
        return indexName(smartName);
    }

    // TODO: remove this since the underlying index names are now the same across all types
    public MappedFieldType smartNameFieldType(String smartName, @Nullable String[] types) {
        if (types == null || types.length == 0 || types.length == 1 && types[0].equals("_all")) {
            return smartNameFieldType(smartName);
        }
        for (String type : types) {
            DocumentMapper documentMapper = mappers.get(type);
            // we found a mapper
            if (documentMapper != null) {
                // see if we find a field for it
                FieldMappers mappers = documentMapper.mappers().smartName(smartName);
                if (mappers != null) {
                    return mappers.mapper().fieldType();
                }
            }
        }
        return null;
        return smartNameFieldType(smartName);
    }

    /**

@@ -622,18 +576,14 @@ public class MapperService extends AbstractIndexComponent {
        } else {
            do {
                String objectPath = fieldName.substring(0, indexOf);
                ObjectMappers objectMappers = fullPathObjectMappers.get(objectPath);
                if (objectMappers == null) {
                ObjectMapper objectMapper = fullPathObjectMappers.get(objectPath);
                if (objectMapper == null) {
                    indexOf = objectPath.lastIndexOf('.');
                    continue;
                }

                if (objectMappers.hasNested()) {
                    for (ObjectMapper objectMapper : objectMappers) {
                        if (objectMapper.nested().isNested()) {
                            return objectMapper;
                        }
                    }
                if (objectMapper.nested().isNested()) {
                    return objectMapper;
                }

                indexOf = objectPath.lastIndexOf('.');
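The merge path now has a two-phase shape: validate everything against existing state first (checkNewMappersCompatibility), then publish (addMappers), all under one write lock so a conflicting mapping can never be partially applied. A simplified, self-contained sketch of that shape — the string-based mappers and the conflict rule are placeholders, not the real validation logic:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class TwoPhaseSketch {
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    private volatile List<String> published = List.of(); // immutable snapshot

    void merge(List<String> newMappers, boolean updateAllTypes) {
        lock.writeLock().lock();
        try {
            check(newMappers, updateAllTypes); // phase 1: throws, state untouched
            publish(newMappers);               // phase 2: swap in the new snapshot
        } finally {
            lock.writeLock().unlock();
        }
    }

    private void check(List<String> mappers, boolean updateAllTypes) {
        if (mappers.contains("conflicting-field") && updateAllTypes == false) {
            throw new IllegalArgumentException("conflicts with existing mapping in other types");
        }
    }

    private void publish(List<String> mappers) {
        List<String> next = new ArrayList<>(published);
        next.addAll(mappers);
        published = List.copyOf(next); // volatile write publishes atomically to readers
    }
}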
@@ -19,28 +19,16 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;

import java.io.IOException;
import java.util.Collection;

public enum MapperUtils {
    ;

    private static MergeResult newStrictMergeResult() {
        return new MergeResult(false) {

            @Override
            public boolean hasConflicts() {
                return false;
            }

            @Override
            public String[] buildConflicts() {
                return Strings.EMPTY_ARRAY;
            }
        return new MergeResult(false, false) {

            @Override
            public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
@@ -19,6 +19,7 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.object.ObjectMapper;

import java.util.ArrayList;

@@ -26,29 +27,55 @@ import java.util.Collection;
import java.util.List;

/** A container for tracking results of a mapping merge. */
public abstract class MergeResult {
public class MergeResult {

    private final boolean simulate;
    private final boolean updateAllTypes;

    public MergeResult(boolean simulate) {
    private final List<String> conflicts = new ArrayList<>();
    private final List<FieldMapper> newFieldMappers = new ArrayList<>();
    private final List<ObjectMapper> newObjectMappers = new ArrayList<>();

    public MergeResult(boolean simulate, boolean updateAllTypes) {
        this.simulate = simulate;
        this.updateAllTypes = updateAllTypes;
    }

    public abstract void addFieldMappers(Collection<FieldMapper> fieldMappers);
    public void addFieldMappers(Collection<FieldMapper> fieldMappers) {
        assert simulate() == false;
        newFieldMappers.addAll(fieldMappers);
    }

    public abstract void addObjectMappers(Collection<ObjectMapper> objectMappers);
    public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
        assert simulate() == false;
        newObjectMappers.addAll(objectMappers);
    }

    public abstract Collection<FieldMapper> getNewFieldMappers();
    public Collection<FieldMapper> getNewFieldMappers() {
        return newFieldMappers;
    }

    public abstract Collection<ObjectMapper> getNewObjectMappers();
    public Collection<ObjectMapper> getNewObjectMappers() {
        return newObjectMappers;
    }

    public boolean simulate() {
        return simulate;
    }

    public abstract void addConflict(String mergeFailure);
    public boolean updateAllTypes() {
        return updateAllTypes;
    }

    public abstract boolean hasConflicts();
    public void addConflict(String mergeFailure) {
        conflicts.add(mergeFailure);
    }

    public abstract String[] buildConflicts();
}
    public boolean hasConflicts() {
        return conflicts.isEmpty() == false;
    }

    public String[] buildConflicts() {
        return conflicts.toArray(Strings.EMPTY_ARRAY);
    }
}
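Since MergeResult is now concrete, callers construct it directly rather than subclassing; checkNewMappersCompatibility above already does so with new MergeResult(true, updateAllTypes). A short usage sketch against the class exactly as defined here:

MergeResult result = new MergeResult(true /* simulate */, false /* updateAllTypes */);
result.addConflict("mapper [title] has different index values");
if (result.hasConflicts()) {
    // a simulated run surfaces conflicts without mutating any mappers
    throw new IllegalArgumentException(String.join(", ", result.buildConflicts()));
}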
@@ -280,6 +280,11 @@ public abstract class ParseContext {
        return in.analysisService();
    }

    @Override
    public MapperService mapperService() {
        return in.mapperService();
    }

    @Override
    public String id() {
        return in.id();

@@ -513,6 +518,11 @@ public abstract class ParseContext {
        return docMapperParser.analysisService;
    }

    @Override
    public MapperService mapperService() {
        return docMapperParser.mapperService;
    }

    @Override
    public String id() {
        return id;

@@ -701,6 +711,8 @@ public abstract class ParseContext {

    public abstract AnalysisService analysisService();

    public abstract MapperService mapperService();

    public abstract String id();

    public abstract void ignoredValue(String indexName, String value);
@@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.core;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import org.apache.lucene.document.Field;

@@ -39,6 +38,7 @@ import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldTypeReference;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;

@@ -133,7 +133,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
    }

    public T storeTermVectors(boolean termVectors) {
        if (termVectors) {
        if (termVectors != this.fieldType.storeTermVectors()) {
            this.fieldType.setStoreTermVectors(termVectors);
        } // don't set it to false, it is default and might be flipped by a more specific option
        return builder;

@@ -268,7 +268,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
        }
    }

    protected MappedFieldType fieldType;
    protected MappedFieldTypeReference fieldTypeRef;
    protected final boolean hasDefaultDocValues;
    protected Settings customFieldDataSettings;
    protected final MultiFields multiFields;

@@ -302,14 +302,16 @@ public abstract class AbstractFieldMapper implements FieldMapper {
        }
        hasDefaultDocValues = docValues == null;

        this.fieldType = fieldType.clone();
        this.fieldTypeRef = new MappedFieldTypeReference(fieldType); // must init first so defaultDocValues() can be called
        fieldType = fieldType.clone();
        if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
            this.fieldType().setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            this.fieldType().setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
        }
        this.fieldType().setHasDocValues(docValues == null ? defaultDocValues() : docValues);
        this.fieldType().setFieldDataType(fieldDataType);
        this.fieldType().freeze();
        fieldType.setHasDocValues(docValues == null ? defaultDocValues() : docValues);
        fieldType.setFieldDataType(fieldDataType);
        fieldType.freeze();
        this.fieldTypeRef.set(fieldType); // now reset ref once extra settings have been initialized

        this.multiFields = multiFields;
        this.copyTo = copyTo;

@@ -335,7 +337,21 @@ public abstract class AbstractFieldMapper implements FieldMapper {

    @Override
    public MappedFieldType fieldType() {
        return fieldType;
        return fieldTypeRef.get();
    }

    @Override
    public MappedFieldTypeReference fieldTypeReference() {
        return fieldTypeRef;
    }

    @Override
    public void setFieldTypeReference(MappedFieldTypeReference ref) {
        if (ref.get().equals(fieldType()) == false) {
            throw new IllegalStateException("Cannot overwrite field type reference to unequal reference");
        }
        ref.incRefCount();
        this.fieldTypeRef = ref;
    }

    @Override

@@ -393,7 +409,8 @@ public abstract class AbstractFieldMapper implements FieldMapper {
        }
        AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
        List<String> subConflicts = new ArrayList<>(); // TODO: just expose list from MergeResult?
        fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts);
        boolean strict = this.fieldTypeRef.getRefCount() > 1 && mergeResult.updateAllTypes() == false;
        fieldType().checkCompatibility(fieldMergeWith.fieldType(), subConflicts, strict);
        for (String conflict : subConflicts) {
            mergeResult.addConflict(conflict);
        }

@@ -401,13 +418,10 @@ public abstract class AbstractFieldMapper implements FieldMapper {

        if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
            // apply changeable values
            this.fieldType = fieldMergeWith.fieldType().clone();
            this.fieldType().freeze();
            if (fieldMergeWith.customFieldDataSettings != null) {
                if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
                    this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
                }
            }
            MappedFieldType fieldType = fieldMergeWith.fieldType().clone();
            fieldType.freeze();
            fieldTypeRef.set(fieldType);
            this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
            this.copyTo = fieldMergeWith.copyTo;
        }
    }

@@ -468,7 +482,8 @@ public abstract class AbstractFieldMapper implements FieldMapper {
        }

        TreeMap<String, Object> orderedFielddataSettings = new TreeMap<>();
        if (customFieldDataSettings != null) {
        boolean hasCustomFieldDataSettings = customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;
        if (hasCustomFieldDataSettings) {
            orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap());
            builder.field("fielddata", orderedFielddataSettings);
        } else if (includeDefaults) {
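The merge branch above applies changeable values by cloning the incoming field type, freezing it, and publishing it through the shared reference, so every mapper holding that reference (across all types) observes the change at once. A self-contained sketch of that publish step — Ref and FieldType are stand-ins for MappedFieldTypeReference and MappedFieldType:

public class MergeApplySketch {
    static final class FieldType {
        float boost = 1f;
        boolean frozen;
        void freeze() { frozen = true; }
        FieldType copy() { FieldType c = new FieldType(); c.boost = boost; return c; }
    }

    static final class Ref { // stands in for MappedFieldTypeReference
        private FieldType ft;
        Ref(FieldType ft) { ft.freeze(); this.ft = ft; }
        FieldType get() { return ft; }
        void set(FieldType ft) { ft.freeze(); this.ft = ft; }
    }

    public static void main(String[] args) {
        Ref shared = new Ref(new FieldType());   // held by mappers in several types
        FieldType updated = shared.get().copy(); // apply changeable values on a copy
        updated.boost = 2f;
        shared.set(updated);                     // publish: all holders now see boost = 2
        System.out.println(shared.get().boost);
    }
}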
@@ -243,8 +243,8 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
    }

    @Override
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
        super.checkCompatibility(fieldType, conflicts);
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
        super.checkCompatibility(fieldType, conflicts, strict);
        CompletionFieldType other = (CompletionFieldType)fieldType;
        if (analyzingSuggestLookupProvider.hasPayloads() != other.analyzingSuggestLookupProvider.hasPayloads()) {
            conflicts.add("mapper [" + names().fullName() + "] has different payload values");
@@ -239,6 +239,7 @@ public class DateFieldMapper extends NumberFieldMapper {
        if (!super.equals(o)) return false;
        DateFieldType that = (DateFieldType) o;
        return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) &&
               Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) &&
               Objects.equals(timeUnit, that.timeUnit);
    }

@@ -247,6 +248,23 @@ public class DateFieldMapper extends NumberFieldMapper {
        return Objects.hash(super.hashCode(), dateTimeFormatter.format(), timeUnit);
    }

    @Override
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
        super.checkCompatibility(fieldType, conflicts, strict);
        if (strict) {
            DateFieldType other = (DateFieldType)fieldType;
            if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [format] across all types.");
            }
            if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types.");
            }
            if (Objects.equals(timeUnit(), other.timeUnit()) == false) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types.");
            }
        }
    }

    public FormatDateTimeFormatter dateTimeFormatter() {
        return dateTimeFormatter;
    }
@@ -317,8 +317,14 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
        if (!this.getClass().equals(mergeWith.getClass())) {
            return;
        }
        if (!mergeResult.simulate()) {
            NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
        NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
        if (this.fieldTypeRef.getRefCount() > 1 && mergeResult.updateAllTypes() == false) {
            if (fieldType().numericPrecisionStep() != nfmMergeWith.fieldType().numericPrecisionStep()) {
                mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] is used by multiple types. Set update_all_types to true to update precision_step across all types.");
            }
        }

        if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
            this.includeInAll = nfmMergeWith.includeInAll;
            if (nfmMergeWith.ignoreMalformed.explicit()) {
                this.ignoreMalformed = nfmMergeWith.ignoreMalformed;
@@ -182,8 +182,8 @@ public class TypeParsers {
    }

    public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
        NamedAnalyzer indexAnalyzer = null;
        NamedAnalyzer searchAnalyzer = null;
        NamedAnalyzer indexAnalyzer = builder.fieldType.indexAnalyzer();
        NamedAnalyzer searchAnalyzer = builder.fieldType.searchAnalyzer();
        for (Iterator<Map.Entry<String, Object>> iterator = fieldNode.entrySet().iterator(); iterator.hasNext();) {
            Map.Entry<String, Object> entry = iterator.next();
            final String propName = Strings.toUnderscoreCase(entry.getKey());
@@ -330,8 +330,8 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
    }

    @Override
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
        super.checkCompatibility(fieldType, conflicts);
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
        super.checkCompatibility(fieldType, conflicts, strict);
        GeoPointFieldType other = (GeoPointFieldType)fieldType;
        if (isLatLonEnabled() != other.isLatLonEnabled()) {
            conflicts.add("mapper [" + names().fullName() + "] has different lat_lon");
@@ -199,7 +199,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
    }

    @Override
    public MappedFieldType clone() {
    public GeoShapeFieldType clone() {
        return new GeoShapeFieldType(this);
    }

@@ -246,8 +246,8 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
    }

    @Override
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts) {
        super.checkCompatibility(fieldType, conflicts);
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
        super.checkCompatibility(fieldType, conflicts, strict);
        GeoShapeFieldType other = (GeoShapeFieldType)fieldType;
        // prevent user from changing strategies
        if (strategyName().equals(other.strategyName()) == false) {
@@ -92,8 +92,8 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {

    private EnabledAttributeMapper enabled = Defaults.ENABLED;

    public Builder() {
        super(Defaults.NAME, Defaults.FIELD_TYPE);
    public Builder(MappedFieldType existing) {
        super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
        builder = this;
        indexName = Defaults.INDEX_NAME;
    }

@@ -119,7 +119,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            AllFieldMapper.Builder builder = all();
            AllFieldMapper.Builder builder = all(parserContext.mapperService().fullName(NAME));

            // parseField below will happily parse the doc_values setting, but it is then never passed to
            // the AllFieldMapper ctor in the builder since it is not valid. Here we validate

@@ -191,8 +191,11 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {

    private EnabledAttributeMapper enabledState;

    public AllFieldMapper(Settings indexSettings) {
        this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED, null, indexSettings);
    public AllFieldMapper(Settings indexSettings, MappedFieldType existing) {
        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
             Defaults.ENABLED,
             existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
             indexSettings);
    }

    protected AllFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled,

@@ -312,7 +315,8 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
        builder.field("similarity", SimilarityLookupService.DEFAULT_SIMILARITY);
    }

    if (customFieldDataSettings != null) {
    boolean hasCustomFieldDataSettings = customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;
    if (hasCustomFieldDataSettings) {
        builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
    } else if (includeDefaults) {
        builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
@@ -159,6 +159,16 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
        return Objects.hash(super.hashCode(), enabled);
    }

    @Override
    public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
        if (strict) {
            FieldNamesFieldType other = (FieldNamesFieldType)fieldType;
            if (isEnabled() != other.isEnabled()) {
                conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [enabled] across all types.");
            }
        }
    }

    public void setEnabled(boolean enabled) {
        checkIfFrozen();
        this.enabled = enabled;

@@ -190,8 +200,10 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
    private final MappedFieldType defaultFieldType;
    private final boolean pre13Index; // if the index was created before 1.3, _field_names is always disabled

    public FieldNamesFieldMapper(Settings indexSettings) {
        this(Defaults.FIELD_TYPE.clone(), null, indexSettings);
    public FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {
        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
             existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
             indexSettings);
    }

    public FieldNamesFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {

@@ -199,9 +211,10 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
        this.defaultFieldType = Defaults.FIELD_TYPE;
        this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
        if (this.pre13Index) {
            this.fieldType = fieldType().clone();
            fieldType().setEnabled(false);
            fieldType().freeze();
            FieldNamesFieldType newFieldType = fieldType().clone();
            newFieldType.setEnabled(false);
            newFieldType.freeze();
            fieldTypeRef.set(newFieldType);
        }
    }
@@ -92,8 +92,8 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {

    private String path = Defaults.PATH;

    public Builder() {
        super(Defaults.NAME, Defaults.FIELD_TYPE);
    public Builder(MappedFieldType existing) {
        super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
        indexName = Defaults.NAME;
    }

@@ -120,7 +120,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
    if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
        throw new MapperParsingException(NAME + " is not configurable");
    }
    IdFieldMapper.Builder builder = id();
    IdFieldMapper.Builder builder = id(parserContext.mapperService().fullName(NAME));
    parseField(builder, builder.name, node, parserContext);
    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();

@@ -226,8 +226,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {

    private final String path;

    public IdFieldMapper(Settings indexSettings) {
        this(idFieldType(indexSettings), null, Defaults.PATH, null, indexSettings);
    public IdFieldMapper(Settings indexSettings, MappedFieldType existing) {
        this(idFieldType(indexSettings, existing), null, Defaults.PATH,
             existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
             indexSettings);
    }

    protected IdFieldMapper(MappedFieldType fieldType, Boolean docValues, String path,

@@ -236,7 +238,10 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
        this.path = path;
    }

    private static MappedFieldType idFieldType(Settings indexSettings) {
    private static MappedFieldType idFieldType(Settings indexSettings, MappedFieldType existing) {
        if (existing != null) {
            return existing.clone();
        }
        MappedFieldType fieldType = Defaults.FIELD_TYPE.clone();
        boolean pre2x = Version.indexCreated(indexSettings).before(Version.V_2_0_0);
        if (pre2x && indexSettings.getAsBoolean("index.mapping._id.indexed", true) == false) {

@@ -306,12 +311,13 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
        return builder;
    }
    boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
    boolean hasCustomFieldDataSettings = customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;

    // if all are defaults, no sense to write it at all
    if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored()
            && fieldType().indexOptions() == Defaults.FIELD_TYPE.indexOptions()
            && path == Defaults.PATH
            && customFieldDataSettings == null) {
            && hasCustomFieldDataSettings == false) {
        return builder;
    }
    builder.startObject(CONTENT_TYPE);

@@ -325,7 +331,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
        builder.field("path", path);
    }

    if (customFieldDataSettings != null) {
    if (hasCustomFieldDataSettings) {
        builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
    } else if (includeDefaults) {
        builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
@@ -79,8 +79,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper

    private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;

    public Builder() {
        super(Defaults.NAME, Defaults.FIELD_TYPE);
    public Builder(MappedFieldType existing) {
        super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
        indexName = Defaults.NAME;
    }

@@ -99,7 +99,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            IndexFieldMapper.Builder builder = MapperBuilders.index();
            IndexFieldMapper.Builder builder = MapperBuilders.index(parserContext.mapperService().fullName(NAME));
            if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
                parseField(builder, builder.name, node, parserContext);
            }

@@ -144,8 +144,10 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper

    private EnabledAttributeMapper enabledState;

    public IndexFieldMapper(Settings indexSettings) {
        this(Defaults.FIELD_TYPE.clone(), Defaults.ENABLED_STATE, null, indexSettings);
    public IndexFieldMapper(Settings indexSettings, MappedFieldType existing) {
        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing,
             Defaults.ENABLED_STATE,
             existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()), indexSettings);
    }

    public IndexFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabledState,

@@ -204,9 +206,10 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        boolean hasCustomFieldDataSettings = customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;

        // if all defaults, no need to write it at all
        if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE && customFieldDataSettings == null) {
        if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED_STATE && hasCustomFieldDataSettings == false) {
            return builder;
        }
        builder.startObject(CONTENT_TYPE);

@@ -218,7 +221,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
    }

    if (indexCreatedBefore2x) {
        if (customFieldDataSettings != null) {
        if (hasCustomFieldDataSettings) {
            builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
        } else if (includeDefaults) {
            builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
@@ -81,6 +81,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
         FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
         FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
         FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+        FIELD_TYPE.setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
         FIELD_TYPE.freeze();
     }
 }

@@ -234,11 +235,11 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
         this.type = type;
     }

-    public ParentFieldMapper(Settings indexSettings) {
-        this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings);
-        this.fieldType = this.fieldType().clone();
-        this.fieldType().setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
-        this.fieldType().freeze();
+    public ParentFieldMapper(Settings indexSettings, MappedFieldType existing) {
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
+             null,
+             existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
+             indexSettings);
     }

     public String type() {

@@ -325,10 +326,11 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
             return builder;
         }
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
+        boolean hasCustomFieldDataSettings = customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;

         builder.startObject(CONTENT_TYPE);
         builder.field("type", type);
-        if (customFieldDataSettings != null) {
+        if (hasCustomFieldDataSettings) {
             builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
         } else if (includeDefaults) {
             builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());

@@ -339,21 +341,10 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper

     @Override
     public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
-        ParentFieldMapper other = (ParentFieldMapper) mergeWith;
-        if (Objects.equal(type, other.type) == false) {
-            mergeResult.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + other.type + "]");
-        }
-
-        if (!mergeResult.simulate()) {
-            ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
-            this.fieldType = fieldMergeWith.fieldType().clone();
-            this.fieldType().freeze();
-
-            if (fieldMergeWith.customFieldDataSettings != null) {
-                if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
-                    this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
-                }
-            }
+        super.merge(mergeWith, mergeResult);
+        ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
+        if (Objects.equal(type, fieldMergeWith.type) == false) {
+            mergeResult.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + fieldMergeWith.type + "]");
         }
     }
@@ -81,8 +81,8 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe

         private String path = Defaults.PATH;

-        public Builder() {
-            super(Defaults.NAME, Defaults.FIELD_TYPE);
+        public Builder(MappedFieldType existing) {
+            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
         }

         public Builder required(boolean required) {

@@ -97,14 +97,14 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe

         @Override
         public RoutingFieldMapper build(BuilderContext context) {
-            return new RoutingFieldMapper(fieldType, required, path, fieldDataSettings, context.indexSettings());
+            return new RoutingFieldMapper(fieldType, required, path, context.indexSettings());
         }
     }

     public static class TypeParser implements Mapper.TypeParser {
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            RoutingFieldMapper.Builder builder = routing();
+            RoutingFieldMapper.Builder builder = routing(parserContext.mapperService().fullName(NAME));
             if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
                 parseField(builder, builder.name, node, parserContext);
             }

@@ -151,12 +151,12 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
     private boolean required;
     private final String path;

-    public RoutingFieldMapper(Settings indexSettings) {
-        this(Defaults.FIELD_TYPE, Defaults.REQUIRED, Defaults.PATH, null, indexSettings);
+    public RoutingFieldMapper(Settings indexSettings, MappedFieldType existing) {
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), Defaults.REQUIRED, Defaults.PATH, indexSettings);
     }

-    protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(fieldType, false, fieldDataSettings, indexSettings);
+    protected RoutingFieldMapper(MappedFieldType fieldType, boolean required, String path, Settings indexSettings) {
+        super(fieldType, false, null, indexSettings);
         this.required = required;
         this.path = path;
     }
@@ -61,6 +61,8 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
         SIZE_FIELD_TYPE.setStored(true);
         SIZE_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_32_BIT);
         SIZE_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
+        SIZE_FIELD_TYPE.setIndexAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Defaults.PRECISION_STEP_32_BIT));
+        SIZE_FIELD_TYPE.setSearchAnalyzer(NumericIntegerAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE));
         SIZE_FIELD_TYPE.freeze();
     }
 }

@@ -69,8 +71,8 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {

         protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;

-        public Builder() {
-            super(Defaults.NAME, Defaults.SIZE_FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT);
+        public Builder(MappedFieldType existing) {
+            super(Defaults.NAME, existing == null ? Defaults.SIZE_FIELD_TYPE : existing, Defaults.PRECISION_STEP_32_BIT);
             builder = this;
         }

@@ -82,7 +84,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
         @Override
         public SizeFieldMapper build(BuilderContext context) {
             setupFieldType(context);
-            return new SizeFieldMapper(enabledState, fieldType, fieldDataSettings, context.indexSettings());
+            return new SizeFieldMapper(enabledState, fieldType, context.indexSettings());
         }

         @Override

@@ -99,7 +101,7 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
     public static class TypeParser implements Mapper.TypeParser {
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            SizeFieldMapper.Builder builder = size();
+            SizeFieldMapper.Builder builder = size(parserContext.mapperService().fullName(NAME));
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                 Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = Strings.toUnderscoreCase(entry.getKey());

@@ -118,14 +120,12 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {

     private EnabledAttributeMapper enabledState;

-    public SizeFieldMapper(Settings indexSettings) {
-        this(Defaults.ENABLED_STATE, Defaults.SIZE_FIELD_TYPE.clone(), null, indexSettings);
+    public SizeFieldMapper(Settings indexSettings, MappedFieldType existing) {
+        this(Defaults.ENABLED_STATE, existing == null ? Defaults.SIZE_FIELD_TYPE.clone() : existing.clone(), indexSettings);
     }

-    public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(fieldType, false,
-              Defaults.IGNORE_MALFORMED, Defaults.COERCE, fieldDataSettings,
-              indexSettings, MultiFields.empty(), null);
+    public SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, Settings indexSettings) {
+        super(fieldType, false, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings, MultiFields.empty(), null);
         this.enabledState = enabled;
     }
@@ -95,8 +95,12 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
         private boolean explicitStore = false;
         private Boolean ignoreMissing = null;

-        public Builder() {
-            super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
+        public Builder(MappedFieldType existing) {
+            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.PRECISION_STEP_64_BIT);
+            if (existing != null) {
+                // if there is an existing type, always use that store value (only matters for < 2.0)
+                explicitStore = true;
+            }
         }

         DateFieldType fieldType() {

@@ -137,7 +141,6 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
         @Override
         public TimestampFieldMapper build(BuilderContext context) {
             if (explicitStore == false && context.indexCreatedVersion().before(Version.V_2_0_0)) {
-                assert fieldType.stored();
                 fieldType.setStored(false);
             }
             setupFieldType(context);

@@ -160,7 +163,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
     public static class TypeParser implements Mapper.TypeParser {
         @Override
         public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            TimestampFieldMapper.Builder builder = timestamp();
+            TimestampFieldMapper.Builder builder = timestamp(parserContext.mapperService().fullName(NAME));
             if (parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
                 parseField(builder, builder.name, node, parserContext);
             }

@@ -234,7 +237,10 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
         }
     }

-    private static MappedFieldType defaultFieldType(Settings settings) {
+    private static MappedFieldType defaultFieldType(Settings settings, MappedFieldType existing) {
+        if (existing != null) {
+            return existing;
+        }
         return Version.indexCreated(settings).onOrAfter(Version.V_2_0_0) ? Defaults.FIELD_TYPE : Defaults.PRE_20_FIELD_TYPE;
     }

@@ -245,9 +251,11 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
     private final MappedFieldType defaultFieldType;
    private final Boolean ignoreMissing;

-    public TimestampFieldMapper(Settings indexSettings) {
-        this(defaultFieldType(indexSettings).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP,
-             null, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings);
+    public TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) {
+        this(defaultFieldType(indexSettings, existing).clone(), null, Defaults.ENABLED, Defaults.PATH, Defaults.DEFAULT_TIMESTAMP,
+             null, Defaults.IGNORE_MALFORMED, Defaults.COERCE,
+             existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
+             indexSettings);
     }

     protected TimestampFieldMapper(MappedFieldType fieldType, Boolean docValues, EnabledAttributeMapper enabledState, String path,

@@ -258,7 +266,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
         this.enabledState = enabledState;
         this.path = path;
         this.defaultTimestamp = defaultTimestamp;
-        this.defaultFieldType = defaultFieldType(indexSettings);
+        this.defaultFieldType = defaultFieldType(indexSettings, null);
         this.ignoreMissing = ignoreMissing;
     }

@@ -324,9 +332,10 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
         boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
         boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
+        boolean hasCustomFieldDataSettings = customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;

         // if all are defaults, no sense to write it at all
-        if (!includeDefaults && indexed == indexedDefault && customFieldDataSettings == null &&
+        if (!includeDefaults && indexed == indexedDefault && hasCustomFieldDataSettings == false &&
                 fieldType().stored() == Defaults.FIELD_TYPE.stored() && enabledState == Defaults.ENABLED && path == Defaults.PATH
                 && fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format())
                 && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)

@@ -359,7 +368,7 @@ public class TimestampFieldMapper extends DateFieldMapper implements RootMapper
             builder.field("ignore_missing", ignoreMissing);
         }
         if (indexCreatedBefore2x) {
-            if (customFieldDataSettings != null) {
+            if (hasCustomFieldDataSettings) {
                 builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
             } else if (includeDefaults) {
                 builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
@@ -81,15 +81,15 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {

     public static class Builder extends AbstractFieldMapper.Builder<Builder, TypeFieldMapper> {

-        public Builder() {
-            super(Defaults.NAME, Defaults.FIELD_TYPE);
+        public Builder(MappedFieldType existing) {
+            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
             indexName = Defaults.NAME;
         }

         @Override
         public TypeFieldMapper build(BuilderContext context) {
             fieldType.setNames(new MappedFieldType.Names(name, indexName, indexName, name));
-            return new TypeFieldMapper(fieldType, fieldDataSettings, context.indexSettings());
+            return new TypeFieldMapper(fieldType, context.indexSettings());
         }
     }

@@ -99,7 +99,7 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
             if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
                 throw new MapperParsingException(NAME + " is not configurable");
             }
-            TypeFieldMapper.Builder builder = type();
+            TypeFieldMapper.Builder builder = type(parserContext.mapperService().fullName(NAME));
             parseField(builder, builder.name, node, parserContext);
             return builder;
         }

@@ -142,12 +142,13 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
         }
     }

-    public TypeFieldMapper(Settings indexSettings) {
-        this(Defaults.FIELD_TYPE.clone(), null, indexSettings);
+    public TypeFieldMapper(Settings indexSettings, MappedFieldType existing) {
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(),
+             indexSettings);
     }

-    public TypeFieldMapper(MappedFieldType fieldType, @Nullable Settings fieldDataSettings, Settings indexSettings) {
-        super(fieldType, false, fieldDataSettings, indexSettings);
+    public TypeFieldMapper(MappedFieldType fieldType, Settings indexSettings) {
+        super(fieldType, false, null, indexSettings);
     }

     @Override
@@ -82,8 +82,8 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {

     public static class Builder extends AbstractFieldMapper.Builder<Builder, UidFieldMapper> {

-        public Builder() {
-            super(Defaults.NAME, Defaults.FIELD_TYPE);
+        public Builder(MappedFieldType existing) {
+            super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing);
             indexName = Defaults.NAME;
         }

@@ -97,10 +97,10 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
     public static class TypeParser implements Mapper.TypeParser {
         @Override
         public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
-            Builder builder = uid();
             if (parserContext.indexVersionCreated().onOrAfter(Version.V_2_0_0)) {
                 throw new MapperParsingException(NAME + " is not configurable");
             }
+            Builder builder = uid(parserContext.mapperService().fullName(NAME));
             parseField(builder, builder.name, node, parserContext);
             return builder;
         }

@@ -130,8 +130,10 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
         }
     }

-    public UidFieldMapper(Settings indexSettings) {
-        this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings);
+    public UidFieldMapper(Settings indexSettings, MappedFieldType existing) {
+        this(existing == null ? Defaults.FIELD_TYPE.clone() : existing, null,
+             existing == null ? null : (existing.fieldDataType() == null ? null : existing.fieldDataType().getSettings()),
+             indexSettings);
     }

     protected UidFieldMapper(MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings, Settings indexSettings) {

@@ -218,15 +220,16 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
             return builder;
         }
         boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
+        boolean hasCustomFieldDataSettings = customFieldDataSettings != null && customFieldDataSettings.equals(Settings.EMPTY) == false;

         // if defaults, don't output
-        if (!includeDefaults && customFieldDataSettings == null) {
+        if (!includeDefaults && hasCustomFieldDataSettings == false) {
            return builder;
         }

         builder.startObject(CONTENT_TYPE);

-        if (customFieldDataSettings != null) {
+        if (hasCustomFieldDataSettings) {
             builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
         } else if (includeDefaults) {
             builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
@@ -419,7 +419,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
             logger.debug("[{}] adding mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, mappingType);
         }
         // we don't apply default, since it has been applied when the mappings were parsed initially
-        mapperService.merge(mappingType, mappingSource, false);
+        mapperService.merge(mappingType, mappingSource, false, true);
         if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
             logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());
             requiresRefresh = true;

@@ -436,7 +436,7 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
             logger.debug("[{}] updating mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, mappingType);
         }
         // we don't apply default, since it has been applied when the mappings were parsed initially
-        mapperService.merge(mappingType, mappingSource, false);
+        mapperService.merge(mappingType, mappingSource, false, true);
         if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
             requiresRefresh = true;
             logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());
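The extra boolean threaded through merge(...) above is the new updateAllTypes flag; the cluster-state appliers pass true, presumably because these mappings were already validated before being broadcast. A hedged sketch of the gate this flag controls, with hypothetical simplified signatures rather than the actual MapperService internals:

    import java.util.HashMap;
    import java.util.Map;

    class FieldTypeGateSketch {
        // one entry per field name, shared across all document types of an index
        private final Map<String, String> coreSettings = new HashMap<>();

        void register(String field, String settings, boolean updateAllTypes) {
            String existing = coreSettings.get(field);
            if (existing != null && existing.equals(settings) == false && updateAllTypes == false) {
                // mirrors the exception messages that suggest update_all_types
                throw new IllegalArgumentException("Mapper for [" + field
                        + "] conflicts with an existing mapping in another type;"
                        + " set update_all_types to update the field across all types");
            }
            coreSettings.put(field, settings);
        }
    }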
@@ -46,6 +46,7 @@ public class RestCreateIndexAction extends BaseRestHandler {
         if (request.hasContent()) {
             createIndexRequest.source(request.content());
         }
+        createIndexRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false));
         createIndexRequest.timeout(request.paramAsTime("timeout", createIndexRequest.timeout()));
         createIndexRequest.masterNodeTimeout(request.paramAsTime("master_timeout", createIndexRequest.masterNodeTimeout()));
         client.admin().indices().create(createIndexRequest, new AcknowledgedRestListener<CreateIndexResponse>(channel));
@@ -69,6 +69,7 @@ public class RestPutMappingAction extends BaseRestHandler {
         PutMappingRequest putMappingRequest = putMappingRequest(Strings.splitStringByCommaToArray(request.param("index")));
         putMappingRequest.type(request.param("type"));
         putMappingRequest.source(request.content().toUtf8());
+        putMappingRequest.updateAllTypes(request.paramAsBoolean("update_all_types", false));
         putMappingRequest.timeout(request.paramAsTime("timeout", putMappingRequest.timeout()));
         putMappingRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putMappingRequest.masterNodeTimeout()));
         putMappingRequest.indicesOptions(IndicesOptions.fromRequest(request, putMappingRequest.indicesOptions()));
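With the two handlers above wired up, the flag can be set per request with ?update_all_types=true on the REST endpoints. A usage sketch against the Java request API, mirroring the statement style of RestPutMappingAction above; the index name and mapping JSON are hypothetical examples:

    PutMappingRequest request = new PutMappingRequest("test");
    request.type("type2");
    // changing an updatable setting on a field that also exists in type1
    request.source("{\"type2\":{\"properties\":{\"shared_field\":{\"type\":\"string\",\"search_analyzer\":\"keyword\"}}}}");
    request.updateAllTypes(true); // equivalent of ?update_all_types=true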
@@ -268,7 +268,7 @@ public class TermVectorsUnitTests extends ElasticsearchTestCase {
         ft.setStoreTermVectorPositions(true);
         String ftOpts = AbstractFieldMapper.termVectorOptionsToString(ft);
         assertThat("with_positions_payloads", equalTo(ftOpts));
-        AllFieldMapper.Builder builder = new AllFieldMapper.Builder();
+        AllFieldMapper.Builder builder = new AllFieldMapper.Builder(null);
         boolean exceptiontrown = false;
         try {
             TypeParsers.parseTermVector("", ftOpts, builder);
@@ -104,7 +104,7 @@ public class SimpleExistsTests extends ElasticsearchIntegrationTest {
         createIndex("test");
         client().prepareIndex("test", "type1", "1").setSource("field", 2).execute().actionGet();
         client().prepareIndex("test", "type1", "2").setSource("field", 5).execute().actionGet();
-        client().prepareIndex("test", "type", "XXX1").setSource("field", "value").execute().actionGet();
+        client().prepareIndex("test", "type", "XXX1").setSource("str_field", "value").execute().actionGet();
         ensureGreen();
         refresh();
         ExistsResponse existsResponse = client().prepareExists("test").setQuery(QueryBuilders.rangeQuery("field").gte(6).lte(8)).execute().actionGet();
@@ -70,21 +70,15 @@ public class RecoveryFromGatewayTests extends ElasticsearchIntegrationTest {
             .endObject().endObject().string();
         assertAcked(prepareCreate("test").addMapping("type1", mapping));
-
         client().prepareIndex("test", "type1", "10990239").setSource(jsonBuilder().startObject()
-                .field("_id", "10990239")
                 .startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
         client().prepareIndex("test", "type1", "10990473").setSource(jsonBuilder().startObject()
-                .field("_id", "10990473")
                 .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
         client().prepareIndex("test", "type1", "10990513").setSource(jsonBuilder().startObject()
-                .field("_id", "10990513")
                 .startArray("appAccountIds").value(14).value(179).endArray().endObject()).execute().actionGet();
         client().prepareIndex("test", "type1", "10990695").setSource(jsonBuilder().startObject()
-                .field("_id", "10990695")
                 .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();
         client().prepareIndex("test", "type1", "11026351").setSource(jsonBuilder().startObject()
-                .field("_id", "11026351")
                 .startArray("appAccountIds").value(14).endArray().endObject()).execute().actionGet();

         refresh();
@@ -25,12 +25,14 @@ import org.elasticsearch.action.ShardOperationFailedException;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.admin.indices.flush.FlushResponse;
 import org.elasticsearch.action.delete.DeleteResponse;
-import org.elasticsearch.action.get.*;
+import org.elasticsearch.action.get.GetRequestBuilder;
+import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.get.MultiGetRequest;
+import org.elasticsearch.action.get.MultiGetRequestBuilder;
+import org.elasticsearch.action.get.MultiGetResponse;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Base64;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.settings.Settings;

@@ -46,7 +48,13 @@ import java.util.Map;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.startsWith;

 public class GetActionTests extends ElasticsearchIntegrationTest {

@@ -248,87 +256,6 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
         assertThat(getResponse.getSourceAsMap().get("field").toString(), equalTo(fieldValue));
     }

-    @Test
-    public void getFieldsWithDifferentTypes() throws Exception {
-        assertAcked(prepareCreate("test").setSettings(Settings.settingsBuilder().put("index.refresh_interval", -1))
-                .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject())
-                .addMapping("type2", jsonBuilder().startObject().startObject("type2")
-                        .startObject("properties")
-                        .startObject("str").field("type", "string").field("store", "yes").endObject()
-                        .startObject("strs").field("type", "string").field("store", "yes").endObject()
-                        .startObject("int").field("type", "integer").field("store", "yes").endObject()
-                        .startObject("ints").field("type", "integer").field("store", "yes").endObject()
-                        .startObject("date").field("type", "date").field("store", "yes").endObject()
-                        .startObject("binary").field("type", "binary").field("store", "yes").endObject()
-                        .endObject()
-                        .endObject().endObject()));
-        ensureGreen();
-
-        client().prepareIndex("test", "type1", "1").setSource(
-                jsonBuilder().startObject()
-                        .field("str", "test")
-                        .field("strs", new String[]{"A", "B", "C"})
-                        .field("int", 42)
-                        .field("ints", new int[]{1, 2, 3, 4})
-                        .field("date", "2012-11-13T15:26:14.000Z")
-                        .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
-                        .endObject()).get();
-
-        client().prepareIndex("test", "type2", "1").setSource(
-                jsonBuilder().startObject()
-                        .field("str", "test")
-                        .field("strs", new String[]{"A", "B", "C"})
-                        .field("int", 42)
-                        .field("ints", new int[]{1, 2, 3, 4})
-                        .field("date", "2012-11-13T15:26:14.000Z")
-                        .field("binary", Base64.encodeBytes(new byte[]{1, 2, 3}))
-                        .endObject()).get();
-
-        // realtime get with stored source
-        logger.info("--> realtime get (from source)");
-        GetResponse getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
-        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
-        assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
-        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
-        assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
-        assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // its a String..., not binary mapped
-
-        logger.info("--> realtime get (from stored fields)");
-        getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
-        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
-        assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
-        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
-        assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
-        assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));
-
-        logger.info("--> flush the index, so we load it from it");
-        flush();
-
-        logger.info("--> non realtime get (from source)");
-        getResponse = client().prepareGet("test", "type1", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
-        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
-        assertThat((Long) getResponse.getField("int").getValue(), equalTo(42l));
-        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1L, 2L, 3L, 4L));
-        assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
-        assertThat(getResponse.getField("binary").getValue(), instanceOf(String.class)); // its a String..., not binary mapped
-
-        logger.info("--> non realtime get (from stored fields)");
-        getResponse = client().prepareGet("test", "type2", "1").setFields("str", "strs", "int", "ints", "date", "binary").get();
-        assertThat(getResponse.isExists(), equalTo(true));
-        assertThat((String) getResponse.getField("str").getValue(), equalTo("test"));
-        assertThat(getResponse.getField("strs").getValues(), contains((Object) "A", "B", "C"));
-        assertThat((Integer) getResponse.getField("int").getValue(), equalTo(42));
-        assertThat(getResponse.getField("ints").getValues(), contains((Object) 1, 2, 3, 4));
-        assertThat((String) getResponse.getField("date").getValue(), equalTo("2012-11-13T15:26:14.000Z"));
-        assertThat((BytesReference) getResponse.getField("binary").getValue(), equalTo((BytesReference) new BytesArray(new byte[]{1, 2, 3})));
-    }
-
     @Test
     public void testGetDocWithMultivaluedFields() throws Exception {
         String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1")

@@ -965,7 +892,11 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
         "    \"refresh_interval\": \"-1\"\n" +
         "  },\n" +
         "  \"mappings\": {\n" +
-        "    \"parentdoc\": {},\n" +
+        "    \"parentdoc\": {\n" +
+        "      \"_ttl\": {\n" +
+        "        \"enabled\": true\n" +
+        "      }\n" +
+        "    },\n" +
         "    \"doc\": {\n" +
         "      \"_parent\": {\n" +
         "        \"type\": \"parentdoc\"\n" +
@@ -1832,7 +1832,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
             AnalysisService analysisService = new AnalysisService(index, settings);
             SimilarityLookupService similarityLookupService = new SimilarityLookupService(index, settings);
             MapperService mapperService = new MapperService(index, settings, analysisService, null, similarityLookupService, null);
-            DocumentMapper.Builder b = new DocumentMapper.Builder(indexName, settings, rootBuilder);
+            DocumentMapper.Builder b = new DocumentMapper.Builder(indexName, settings, rootBuilder, mapperService);
             DocumentMapperParser parser = new DocumentMapperParser(index, settings, mapperService, analysisService, similarityLookupService, null);
             this.docMapper = b.build(mapperService, parser);
@@ -58,10 +58,10 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTests {
     @Before
     public void before() throws Exception {
         mapperService.merge(
-                childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true
+                childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true, false
         );
         mapperService.merge(
-                grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true
+                grandChildType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(grandChildType, "_parent", "type=" + childType).string()), true, false
         );

         Document d = new Document();
@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper;

 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 import org.apache.lucene.document.FieldType;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;

@@ -34,77 +33,112 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;

-public class FieldMappersLookupTests extends ElasticsearchTestCase {
+public class FieldTypeLookupTests extends ElasticsearchTestCase {

     public void testEmpty() {
-        FieldMappersLookup lookup = new FieldMappersLookup();
-        assertNull(lookup.fullName("foo"));
-        assertNull(lookup.indexName("foo"));
+        FieldTypeLookup lookup = new FieldTypeLookup();
+        assertNull(lookup.get("foo"));
+        assertNull(lookup.getByIndexName("foo"));
         Collection<String> names = lookup.simpleMatchToFullName("foo");
         assertNotNull(names);
         assertTrue(names.isEmpty());
-        names = lookup.simpleMatchToFullName("foo");
+        names = lookup.simpleMatchToIndexNames("foo");
         assertNotNull(names);
         assertTrue(names.isEmpty());
-        assertNull(lookup.smartName("foo"));
-        assertNull(lookup.smartNameFieldMapper("foo"));
-        assertNull(lookup.get("foo"));
-        Iterator<FieldMapper> itr = lookup.iterator();
+        Iterator<MappedFieldType> itr = lookup.iterator();
         assertNotNull(itr);
         assertFalse(itr.hasNext());
     }

-    public void testNewField() {
-        FieldMappersLookup lookup = new FieldMappersLookup();
+    public void testAddNewField() {
+        FieldTypeLookup lookup = new FieldTypeLookup();
         FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
-        FieldMappersLookup lookup2 = lookup.copyAndAddAll(newList(f));
-        assertNull(lookup.fullName("foo"));
-        assertNull(lookup.indexName("bar"));
-
-        FieldMappers mappers = lookup2.fullName("foo");
-        assertNotNull(mappers);
-        assertEquals(1, mappers.mappers().size());
-        assertEquals(f, mappers.mapper());
-        mappers = lookup2.indexName("bar");
-        assertNotNull(mappers);
-        assertEquals(1, mappers.mappers().size());
-        assertEquals(f, mappers.mapper());
+        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f));
+        assertNull(lookup.get("foo"));
+        assertNull(lookup.get("bar"));
+        assertNull(lookup.getByIndexName("foo"));
+        assertNull(lookup.getByIndexName("bar"));
+        assertEquals(f.fieldType(), lookup2.get("foo"));
+        assertNull(lookup.get("bar"));
+        assertEquals(f.fieldType(), lookup2.getByIndexName("bar"));
+        assertNull(lookup.getByIndexName("foo"));
         assertEquals(1, Iterators.size(lookup2.iterator()));
     }

-    public void testExtendField() {
-        FieldMappersLookup lookup = new FieldMappersLookup();
-        FakeFieldMapper f = new FakeFieldMapper("foo", "bar");
-        FakeFieldMapper other = new FakeFieldMapper("blah", "blah");
-        lookup = lookup.copyAndAddAll(newList(f, other));
+    public void testAddExistingField() {
+        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
+        MappedFieldType originalFieldType = f.fieldType();
+        FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
+        FieldTypeLookup lookup = new FieldTypeLookup();
+        lookup = lookup.copyAndAddAll(newList(f));
+        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2));
+
+        assertNotSame(originalFieldType, f.fieldType());
+        assertSame(f.fieldType(), f2.fieldType());
+        assertSame(f.fieldType(), lookup2.get("foo"));
+        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
+        assertEquals(1, Iterators.size(lookup2.iterator()));
+    }
+
+    public void testAddExistingIndexName() {
+        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("bar", "foo");
+        MappedFieldType originalFieldType = f.fieldType();
+        FieldTypeLookup lookup = new FieldTypeLookup();
+        lookup = lookup.copyAndAddAll(newList(f));
+        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2));
+
+        assertNotSame(originalFieldType, f.fieldType());
+        assertSame(f.fieldType(), f2.fieldType());
+        assertSame(f.fieldType(), lookup2.get("foo"));
+        assertSame(f.fieldType(), lookup2.get("bar"));
+        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
+        assertEquals(2, Iterators.size(lookup2.iterator()));
+    }
+
+    public void testAddExistingFullName() {
+        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("foo", "bar");
-        FieldMappersLookup lookup2 = lookup.copyAndAddAll(newList(f2));
+        MappedFieldType originalFieldType = f.fieldType();
+        FieldTypeLookup lookup = new FieldTypeLookup();
+        lookup = lookup.copyAndAddAll(newList(f));
+        FieldTypeLookup lookup2 = lookup.copyAndAddAll(newList(f2));

-        FieldMappers mappers = lookup2.fullName("foo");
-        assertNotNull(mappers);
-        assertEquals(2, mappers.mappers().size());
-
-        mappers = lookup2.indexName("bar");
-        assertNotNull(mappers);
-        assertEquals(2, mappers.mappers().size());
-        assertEquals(3, Iterators.size(lookup2.iterator()));
+        assertNotSame(originalFieldType, f.fieldType());
+        assertSame(f.fieldType(), f2.fieldType());
+        assertSame(f.fieldType(), lookup2.get("foo"));
+        assertSame(f.fieldType(), lookup2.getByIndexName("foo"));
+        assertSame(f.fieldType(), lookup2.getByIndexName("bar"));
+        assertEquals(1, Iterators.size(lookup2.iterator()));
     }

-    public void testIndexName() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "foo");
-        FieldMappersLookup lookup = new FieldMappersLookup();
-        lookup = lookup.copyAndAddAll(newList(f1));
+    public void testAddExistingBridgeName() {
+        FakeFieldMapper f = new FakeFieldMapper("foo", "foo");
+        FakeFieldMapper f2 = new FakeFieldMapper("bar", "bar");
+        FieldTypeLookup lookup = new FieldTypeLookup();
+        lookup = lookup.copyAndAddAll(newList(f, f2));

-        FieldMappers mappers = lookup.indexName("foo");
-        assertNotNull(mappers);
-        assertEquals(1, mappers.mappers().size());
-        assertEquals(f1, mappers.mapper());
+        try {
+            FakeFieldMapper f3 = new FakeFieldMapper("foo", "bar");
+            lookup.copyAndAddAll(newList(f3));
+        } catch (IllegalStateException e) {
+            assertTrue(e.getMessage().contains("insane mappings"));
+        }
+
+        try {
+            FakeFieldMapper f3 = new FakeFieldMapper("bar", "foo");
+            lookup.copyAndAddAll(newList(f3));
+        } catch (IllegalStateException e) {
+            assertTrue(e.getMessage().contains("insane mappings"));
+        }
     }

+    // TODO: add tests for validation
+
     public void testSimpleMatchIndexNames() {
         FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
         FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
-        FieldMappersLookup lookup = new FieldMappersLookup();
+        FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll(newList(f1, f2));
         Collection<String> names = lookup.simpleMatchToIndexNames("b*");
         assertTrue(names.contains("baz"));

@@ -114,36 +148,22 @@ public class FieldTypeLookupTests extends ElasticsearchTestCase {
     public void testSimpleMatchFullNames() {
         FakeFieldMapper f1 = new FakeFieldMapper("foo", "baz");
         FakeFieldMapper f2 = new FakeFieldMapper("bar", "boo");
-        FieldMappersLookup lookup = new FieldMappersLookup();
+        FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll(newList(f1, f2));
         Collection<String> names = lookup.simpleMatchToFullName("b*");
         assertTrue(names.contains("foo"));
         assertTrue(names.contains("bar"));
     }

-    public void testSmartName() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "realfoo");
-        FakeFieldMapper f2 = new FakeFieldMapper("foo", "realbar");
-        FakeFieldMapper f3 = new FakeFieldMapper("baz", "realfoo");
-        FieldMappersLookup lookup = new FieldMappersLookup();
-        lookup = lookup.copyAndAddAll(newList(f1, f2, f3));
-
-        assertNotNull(lookup.smartName("foo"));
-        assertEquals(2, lookup.smartName("foo").mappers().size());
-        assertNotNull(lookup.smartName("realfoo"));
-        assertEquals(f1, lookup.smartNameFieldMapper("foo"));
-        assertEquals(f2, lookup.smartNameFieldMapper("realbar"));
-    }
-
     public void testIteratorImmutable() {
         FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
-        FieldMappersLookup lookup = new FieldMappersLookup();
+        FieldTypeLookup lookup = new FieldTypeLookup();
         lookup = lookup.copyAndAddAll(newList(f1));

         try {
-            Iterator<FieldMapper> itr = lookup.iterator();
+            Iterator<MappedFieldType> itr = lookup.iterator();
             assertTrue(itr.hasNext());
-            assertEquals(f1, itr.next());
+            assertEquals(f1.fieldType(), itr.next());
             itr.remove();
             fail("remove should have failed");
         } catch (UnsupportedOperationException e) {

@@ -151,23 +171,6 @@ public class FieldTypeLookupTests extends ElasticsearchTestCase {
         }
     }

-    public void testGetMapper() {
-        FakeFieldMapper f1 = new FakeFieldMapper("foo", "bar");
-        FieldMappersLookup lookup = new FieldMappersLookup();
-        lookup = lookup.copyAndAddAll(newList(f1));
-
-        assertEquals(f1, lookup.get("foo"));
-        assertNull(lookup.get("bar")); // get is only by full name
-        FakeFieldMapper f2 = new FakeFieldMapper("foo", "foo");
-        lookup = lookup.copyAndAddAll(newList(f2));
-        try {
-            lookup.get("foo");
-            fail("get should have enforced foo is unique");
-        } catch (IllegalStateException e) {
-            // expected
-        }
-    }
-
     static List<FieldMapper> newList(FieldMapper... mapper) {
         return Lists.newArrayList(mapper);
     }
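The renamed tests above exercise two properties of the new FieldTypeLookup: copyAndAddAll is copy-on-write (the original lookup never sees the new fields), and a field's full name and index name resolve to one shared MappedFieldType instance. A simplified sketch of that shape, with a stand-in value type rather than the real class:

    import java.util.HashMap;
    import java.util.Map;

    class FieldTypeLookupSketch {
        private final Map<String, Object> fullName = new HashMap<>();
        private final Map<String, Object> indexName = new HashMap<>();

        FieldTypeLookupSketch copyAndAdd(String full, String index, Object fieldType) {
            // copy-on-write: callers holding the old lookup are unaffected
            FieldTypeLookupSketch copy = new FieldTypeLookupSketch();
            copy.fullName.putAll(fullName);
            copy.indexName.putAll(indexName);
            // both maps point at the same instance, so get() and getByIndexName()
            // return the identical object, as the assertSame checks expect
            copy.fullName.put(full, fieldType);
            copy.indexName.put(index, fieldType);
            return copy;
        }
    }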
@@ -229,11 +229,11 @@ public class CopyToMapperTests extends ElasticsearchSingleNodeTest {

         DocumentMapper docMapperAfter = parser.parse(mappingAfter);

-        MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true);
+        MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), true, false);

         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));

-        docMapperBefore.merge(docMapperAfter.mapping(), false);
+        docMapperBefore.merge(docMapperAfter.mapping(), false, false);

         fields = docMapperBefore.mappers().getMapper("copy_test").copyTo().copyToFields();
@@ -64,12 +64,12 @@ public class TokenCountFieldMapperTests extends ElasticsearchSingleNodeTest {
             .endObject().endObject().string();
         DocumentMapper stage2 = parser.parse(stage2Mapping);

-        MergeResult mergeResult = stage1.merge(stage2.mapping(), true);
+        MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         // Just simulated so merge hasn't happened yet
         assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));

-        mergeResult = stage1.merge(stage2.mapping(), false);
+        mergeResult = stage1.merge(stage2.mapping(), false, false);
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         // Just simulated so merge hasn't happened yet
         assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
@@ -351,7 +351,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
         Map<String, String> config = getConfigurationViaXContent(initialDateFieldMapper);
         assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy"));

-        MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false);
+        MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), false, false);

         assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.buildConflicts()), mergeResult.hasConflicts(), is(false));
         assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(DateFieldMapper.class)));
@@ -486,7 +486,7 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
             .endObject().endObject().string();
         DocumentMapper stage2 = parser.parse(stage2Mapping);

-        MergeResult mergeResult = stage1.merge(stage2.mapping(), false);
+        MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
         assertThat(mergeResult.hasConflicts(), equalTo(true));
         assertThat(mergeResult.buildConflicts().length, equalTo(2));
         // todo better way of checking conflict?

@@ -498,7 +498,7 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
             .field("validate", true).field("normalize", true).endObject().endObject()
             .endObject().endObject().string();
         stage2 = parser.parse(stage2Mapping);
-        mergeResult = stage1.merge(stage2.mapping(), false);
+        mergeResult = stage1.merge(stage2.mapping(), false, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
     }
 }
@@ -337,7 +337,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
             .field("orientation", "cw").endObject().endObject().endObject().endObject().string();
         DocumentMapper stage2 = parser.parse(stage2Mapping);

-        MergeResult mergeResult = stage1.merge(stage2.mapping(), false);
+        MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
         // check correct conflicts
         assertThat(mergeResult.hasConflicts(), equalTo(true));
         assertThat(mergeResult.buildConflicts().length, equalTo(4));

@@ -365,7 +365,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
             .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
             .field("tree_levels", 8).field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
         stage2 = parser.parse(stage2Mapping);
-        mergeResult = stage1.merge(stage2.mapping(), false);
+        mergeResult = stage1.merge(stage2.mapping(), false, false);

         // verify mapping changes, and ensure no failures
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
|
|||
.endObject().endObject().string();
|
||||
DocumentMapper mapperDisabled = parser.parse(mappingWithIndexDisabled);
|
||||
|
||||
mapperEnabled.merge(mapperDisabled.mapping(), false);
|
||||
mapperEnabled.merge(mapperDisabled.mapping(), false, false);
|
||||
assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
|
||||
}
|
||||
|
||||
|
@ -115,7 +115,7 @@ public class IndexTypeMapperTests extends ElasticsearchSingleNodeTest {
|
|||
.endObject().endObject().string();
|
||||
DocumentMapper disabledMapper = parser.parse(disabledMapping);
|
||||
|
||||
enabledMapper.merge(disabledMapper.mapping(), false);
|
||||
enabledMapper.merge(disabledMapper.mapping(), false, false);
|
||||
assertThat(enabledMapper.indexMapper().enabled(), is(false));
|
||||
}
|
||||
|
||||
|
|
|
@@ -176,11 +176,11 @@ public class FieldNamesFieldMapperTests extends ElasticsearchSingleNodeTest {

         DocumentMapper mapperEnabled = parser.parse(enabledMapping);
         DocumentMapper mapperDisabled = parser.parse(disabledMapping);
-        mapperEnabled.merge(mapperDisabled.mapping(), false);
+        mapperEnabled.merge(mapperDisabled.mapping(), false, false);
         assertFalse(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());

         mapperEnabled = parser.parse(enabledMapping);
-        mapperDisabled.merge(mapperEnabled.mapping(), false);
+        mapperDisabled.merge(mapperEnabled.mapping(), false, false);
         assertTrue(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).fieldType().isEnabled());
     }
 }
@@ -65,13 +65,13 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
                 .endObject().endObject().endObject().string();
         DocumentMapper stage2 = parser.parse(stage2Mapping);
 
-        MergeResult mergeResult = stage1.merge(stage2.mapping(), true);
+        MergeResult mergeResult = stage1.merge(stage2.mapping(), true, false);
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         // since we are simulating, we should not have the age mapping
         assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
         assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
         // now merge, don't simulate
-        mergeResult = stage1.merge(stage2.mapping(), false);
+        mergeResult = stage1.merge(stage2.mapping(), false, false);
         // there is still merge failures
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         // but we have the age in
@@ -90,7 +90,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
         DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping);
         assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
 
-        MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false);
+        MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), false, false);
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
     }
@@ -107,12 +107,12 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
                 .endObject().endObject().endObject().string();
         DocumentMapper nestedMapper = parser.parse(nestedMapping);
 
-        MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true);
+        MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), true, false);
         assertThat(mergeResult.hasConflicts(), equalTo(true));
         assertThat(mergeResult.buildConflicts().length, equalTo(1));
         assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested"));
 
-        mergeResult = nestedMapper.merge(objectMapper.mapping(), true);
+        mergeResult = nestedMapper.merge(objectMapper.mapping(), true, false);
         assertThat(mergeResult.buildConflicts().length, equalTo(1));
         assertThat(mergeResult.buildConflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
     }
@@ -131,7 +131,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
         DocumentMapper changed = parser.parse(mapping2);
 
         assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
-        MergeResult mergeResult = existing.merge(changed.mapping(), false);
+        MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
 
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("keyword"));
@@ -151,7 +151,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
         DocumentMapper changed = parser.parse(mapping2);
 
         assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("whitespace"));
-        MergeResult mergeResult = existing.merge(changed.mapping(), false);
+        MergeResult mergeResult = existing.merge(changed.mapping(), false, false);
 
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         assertThat(((NamedAnalyzer) existing.mappers().getMapper("field").fieldType().searchAnalyzer()).name(), equalTo("standard"));
@@ -160,7 +160,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
 
     public void testConcurrentMergeTest() throws Throwable {
         final MapperService mapperService = createIndex("test").mapperService();
-        mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true);
+        mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), true, false);
         final DocumentMapper documentMapper = mapperService.documentMapper("test");
 
         DocumentFieldMappers dfm = documentMapper.mappers();
@@ -186,7 +186,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
                     Mapping update = doc.dynamicMappingsUpdate();
                     assert update != null;
                     lastIntroducedFieldName.set(fieldName);
-                    mapperService.merge("test", new CompressedXContent(update.toString()), false);
+                    mapperService.merge("test", new CompressedXContent(update.toString()), false, false);
                 }
             } catch (Throwable t) {
                 error.set(t);
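
The TestMergeMapperTests hunks above all follow the same two-phase protocol: merge once with simulate=true to collect conflicts without mutating anything, then merge again with simulate=false to apply. A hedged sketch of that protocol with the new trailing flag; `parser`, `stage1Mapping` and `stage2Mapping` stand in for the test fixtures and are not part of the patch:

    // Sketch only, mirroring the call sites above.
    DocumentMapper stage1 = parser.parse(stage1Mapping);
    DocumentMapper stage2 = parser.parse(stage2Mapping);

    // Pass 1: simulate. Conflicts are reported, nothing is applied.
    MergeResult dryRun = stage1.merge(stage2.mapping(), true, false);
    if (dryRun.hasConflicts()) {
        throw new IllegalArgumentException(Arrays.toString(dryRun.buildConflicts()));
    }

    // Pass 2: apply for real; updateAllTypes stays false, so only this
    // type is updated.
    stage1.merge(stage2.mapping(), false, false);
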
@@ -155,7 +155,7 @@ public class MultiFieldTests extends ElasticsearchSingleNodeTest {
                 stringField("name").store(true)
                         .addMultiField(stringField("indexed").index(true).tokenized(true))
                         .addMultiField(stringField("not_indexed").index(false).store(true))
-        )).build(indexService.mapperService(), mapperParser);
+        ), indexService.mapperService()).build(indexService.mapperService(), mapperParser);
 
         String builtMapping = builderDocMapper.mappingSource().string();
         // System.out.println(builtMapping);
@@ -62,10 +62,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json");
         DocumentMapper docMapper2 = parser.parse(mapping);
 
-        MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true);
+        MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
 
-        docMapper.merge(docMapper2.mapping(), false);
+        docMapper.merge(docMapper2.mapping(), false, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
 
@@ -84,10 +84,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json");
         DocumentMapper docMapper3 = parser.parse(mapping);
 
-        mergeResult = docMapper.merge(docMapper3.mapping(), true);
+        mergeResult = docMapper.merge(docMapper3.mapping(), true, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
 
-        docMapper.merge(docMapper3.mapping(), false);
+        docMapper.merge(docMapper3.mapping(), false, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
 
@@ -100,10 +100,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping4.json");
         DocumentMapper docMapper4 = parser.parse(mapping);
 
-        mergeResult = docMapper.merge(docMapper4.mapping(), true);
+        mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
 
-        docMapper.merge(docMapper4.mapping(), false);
+        docMapper.merge(docMapper4.mapping(), false, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
 
@@ -135,10 +135,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json");
         DocumentMapper docMapper2 = parser.parse(mapping);
 
-        MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true);
+        MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), true, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
 
-        docMapper.merge(docMapper2.mapping(), false);
+        docMapper.merge(docMapper2.mapping(), false, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
 
@@ -157,10 +157,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json");
         DocumentMapper docMapper3 = parser.parse(mapping);
 
-        mergeResult = docMapper.merge(docMapper3.mapping(), true);
+        mergeResult = docMapper.merge(docMapper3.mapping(), true, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
 
-        docMapper.merge(docMapper3.mapping(), false);
+        docMapper.merge(docMapper3.mapping(), false, false);
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
 
@@ -173,12 +173,12 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
 
         mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json");
         DocumentMapper docMapper4 = parser.parse(mapping);
-        mergeResult = docMapper.merge(docMapper4.mapping(), true);
+        mergeResult = docMapper.merge(docMapper4.mapping(), true, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
         assertThat(mergeResult.buildConflicts()[0], equalTo("mapper [name] has different index values"));
         assertThat(mergeResult.buildConflicts()[1], equalTo("mapper [name] has different store values"));
 
-        mergeResult = docMapper.merge(docMapper4.mapping(), false);
+        mergeResult = docMapper.merge(docMapper4.mapping(), false, false);
         assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(true));
 
         assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("name").fieldType().indexOptions());
@@ -46,8 +46,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest {
         DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
         DocumentMapper docMapper = doc("test", settings,
                 rootObject("person")
-                        .add(object("name").add(stringField("first").store(true).index(false)))
-        ).build(indexService.mapperService(), mapperParser);
+                        .add(object("name").add(stringField("first").store(true).index(false))),
+                indexService.mapperService()).build(indexService.mapperService(), mapperParser);
 
         BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1.json"));
         Document doc = docMapper.parse("person", "1", json).rootDoc();
@@ -124,8 +124,8 @@ public class SimpleMapperTests extends ElasticsearchSingleNodeTest {
         DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
         DocumentMapper docMapper = doc("test", settings,
                 rootObject("person")
-                        .add(object("name").add(stringField("first").store(true).index(false)))
-        ).build(indexService.mapperService(), mapperParser);
+                        .add(object("name").add(stringField("first").store(true).index(false))),
+                indexService.mapperService()).build(indexService.mapperService(), mapperParser);
 
         BytesReference json = new BytesArray("".getBytes(Charsets.UTF_8));
         try {
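
Note that in the MultiFieldTests and SimpleMapperTests hunks the `doc(...)` test builder now also receives the index's MapperService before `build(...)` is called, presumably so newly built field mappers can be checked against field types that already exist in the index. The changed call shape, condensed from the hunks above (`doc`, `rootObject`, `object` and `stringField` are the existing mapper-builder test helpers):

    // Condensed from the diffs above; the second-to-last argument is new.
    DocumentMapper docMapper = doc("test", settings,
            rootObject("person")
                    .add(object("name").add(stringField("first").store(true).index(false))),
            indexService.mapperService()  // new: gives the builder access to existing field types
    ).build(indexService.mapperService(), mapperParser);
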
@@ -112,7 +112,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject().endObject().string();
         DocumentMapper disabledMapper = parser.parse(disabledMapping);
 
-        enabledMapper.merge(disabledMapper.mapping(), false);
+        enabledMapper.merge(disabledMapper.mapping(), false, false);
         assertThat(enabledMapper.SizeFieldMapper().enabled(), is(false));
     }
 }
@@ -193,7 +193,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject().endObject().string();
 
         MapperService mapperService = createIndex("test").mapperService();
-        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true);
+        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false);
 
         DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1();
         assertThat(mapper.type(), equalTo("my_type"));
@@ -206,12 +206,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject().endObject().string();
 
         MapperService mapperService = createIndex("test").mapperService();
-        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true);
+        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false);
 
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type")
                 .startObject("_source").field("enabled", true).endObject()
                 .endObject().endObject().string();
-        mapperService.merge("my_type", new CompressedXContent(mapping), true);
+        mapperService.merge("my_type", new CompressedXContent(mapping), true, false);
 
         DocumentMapper mapper = mapperService.documentMapper("my_type");
         assertThat(mapper.type(), equalTo("my_type"));
@@ -221,7 +221,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {
     void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException {
         DocumentMapper docMapper = parser.parse(mapping1);
         docMapper = parser.parse(docMapper.mappingSource().string());
-        MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true);
+        MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false);
 
         List<String> expectedConflicts = new ArrayList<>(Arrays.asList(conflicts));
         for (String conflict : mergeResult.buildConflicts()) {
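
DefaultSourceMappingTests exercises the same flag at the MapperService level, including for the _default_ mapping that newly created types inherit from. A condensed sketch of that flow, mirroring the hunks above (`defaultMapping` and `mapping` are JSON mapping strings built in the tests):

    MapperService mapperService = createIndex("test").mapperService();

    // Register the _default_ mapping; the trailing false is the new
    // updateAllTypes flag.
    mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedXContent(defaultMapping), true, false);

    // A concrete type registered afterwards starts from _default_.
    mapperService.merge("my_type", new CompressedXContent(mapping), true, false);
    DocumentMapper mapper = mapperService.documentMapper("my_type");
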
@@ -498,7 +498,7 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
         String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
                 .endObject().endObject().endObject().endObject().string();
-        MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false);
+        MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), false, false);
         assertFalse(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts());
 
         doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
@@ -513,7 +513,7 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
         updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
                 .endObject().endObject().endObject().endObject().string();
-        mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true);
+        mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), true, false);
         assertTrue(mergeResult.hasConflicts());
         assertEquals(1, mergeResult.buildConflicts().length);
         assertTrue(mergeResult.buildConflicts()[0].contains("cannot enable norms"));
@@ -153,7 +153,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject().endObject().string();
         DocumentMapper disabledMapper = parser.parse(disabledMapping);
 
-        enabledMapper.merge(disabledMapper.mapping(), false);
+        enabledMapper.merge(disabledMapper.mapping(), false, false);
 
         assertThat(enabledMapper.timestampFieldMapper().enabled(), is(false));
     }
@@ -514,7 +514,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
                 .startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
                 .endObject().endObject().string();
 
-        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false);
+        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), false, false);
         assertThat(mergeResult.buildConflicts().length, equalTo(0));
         assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getLoading(), equalTo(MappedFieldType.Loading.EAGER));
         assertThat(docMapper.timestampFieldMapper().fieldType().fieldDataType().getFormat(indexSettings), equalTo("array"));
@@ -582,7 +582,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject()
                 .endObject().endObject().string();
 
-        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true);
+        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
         List<String> expectedConflicts = new ArrayList<>(Arrays.asList(
                 "mapper [_timestamp] has different index values",
                 "mapper [_timestamp] has different store values",
@@ -621,7 +621,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
                 .endObject()
                 .endObject().endObject().string();
 
-        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true);
+        MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), true, false);
         List<String> expectedConflicts = new ArrayList<>();
         expectedConflicts.add("mapper [_timestamp] has different index values");
         expectedConflicts.add("mapper [_timestamp] has different tokenize values");
@@ -681,7 +681,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
     void assertConflict(String mapping1, String mapping2, DocumentMapperParser parser, String conflict) throws IOException {
         DocumentMapper docMapper = parser.parse(mapping1);
         docMapper = parser.parse(docMapper.mappingSource().string());
-        MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true);
+        MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true, false);
         assertThat(mergeResult.buildConflicts().length, equalTo(conflict == null ? 0 : 1));
         if (conflict != null) {
             assertThat(mergeResult.buildConflicts()[0], containsString(conflict));
@@ -119,7 +119,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         DocumentMapper mapperWithoutTtl = parser.parse(mappingWithoutTtl);
         DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl);
 
-        MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false);
+        MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), false, false);
 
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true));
@@ -145,7 +145,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         DocumentMapper initialMapper = parser.parse(mappingWithTtl);
         DocumentMapper updatedMapper = parser.parse(updatedMapping);
 
-        MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true);
+        MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false);
 
         assertThat(mergeResult.hasConflicts(), equalTo(false));
         assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@@ -159,7 +159,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         DocumentMapper initialMapper = parser.parse(mappingWithTtl);
         DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled);
 
-        MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true);
+        MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), true, false);
 
         assertThat(mergeResult.hasConflicts(), equalTo(true));
         assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@@ -197,7 +197,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
     public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
         IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
         XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean());
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDisabled.string()), true).mapping(), randomBoolean(), false);
         assertFalse(mergeResult.hasConflicts());
     }
 
@@ -205,7 +205,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
     public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
         IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
         XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean());
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), randomBoolean(), false);
         assertFalse(mergeResult.hasConflicts());
     }
 
@@ -214,7 +214,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
         IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
         XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
         assertFalse(mergeResult.hasConflicts());
         CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
         assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@@ -227,7 +227,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource();
         assertThat(mappingAfterCreation, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
         XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), true).mapping(), false, false);
         assertFalse(mergeResult.hasConflicts());
         CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
         assertThat(mappingAfterMerge, equalTo(new CompressedXContent("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@@ -241,7 +241,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         IndexService indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithTtl);
         CompressedXContent mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
         XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d");
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlDifferentDefault.string()), true).mapping(), true, false);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - no mappings applied
         CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@@ -253,7 +253,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
         mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
         XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled();
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - no mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@@ -265,7 +265,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
         mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
         mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), true, false);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - no mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@@ -276,7 +276,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         mappingWithoutTtl = getMappingWithTtlDisabled("6d");
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type", mappingWithoutTtl);
         mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
         assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@@ -286,7 +286,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
         // check if switching simulate flag off works if nothing was applied in the beginning
         indexService = createIndex("testindex", Settings.settingsBuilder().build(), "type");
         mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
-        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false);
+        mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingWithTtlEnabled.string()), true).mapping(), false, false);
        assertFalse(mergeResult.hasConflicts());
         // make sure simulate flag actually worked - mappings applied
         mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource();
@@ -82,7 +82,7 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
     private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
         IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
         // simulate like in MetaDataMappingService#putMapping
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), false, false);
         // assure we have no conflicts
         assertThat(mergeResult.buildConflicts().length, equalTo(0));
         // make sure mappings applied
@@ -106,7 +106,7 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
         IndexService indexService = createIndex("test", Settings.settingsBuilder().build(), "type", mapping);
         CompressedXContent mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
         // simulate like in MetaDataMappingService#putMapping
-        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true);
+        MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedXContent(mappingUpdate.bytes()), true).mapping(), true, false);
         // assure we have conflicts
         assertThat(mergeResult.buildConflicts().length, equalTo(1));
         // make sure simulate flag actually worked - no mappings applied
@@ -57,7 +57,7 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
 
         MapperService mapperService = indexService.mapperService();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
-        mapperService.merge("person", new CompressedXContent(mapping), true);
+        mapperService.merge("person", new CompressedXContent(mapping), true, false);
         ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
@@ -59,7 +59,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ElasticsearchS
 
         MapperService mapperService = indexService.mapperService();
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
-        mapperService.merge("person", new CompressedXContent(mapping), true);
+        mapperService.merge("person", new CompressedXContent(mapping), true, false);
         ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
@@ -92,7 +92,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
         MapperService mapperService = indexService.mapperService();
 
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
-        mapperService.merge("person", new CompressedXContent(mapping), true);
+        mapperService.merge("person", new CompressedXContent(mapping), true, false);
         ParsedDocument doc = mapperService.documentMapper("person").parse("person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
         assertNotNull(doc.dynamicMappingsUpdate());
         client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
@@ -64,8 +64,8 @@ public abstract class AbstractChildTests extends ElasticsearchSingleNodeTest {
         MapperService mapperService = indexService.mapperService();
         // Parent/child parsers require that the parent and child type to be presented in mapping
         // Sometimes we want a nested object field in the parent type that triggers nonNestedDocsFilter to be used
-        mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true);
-        mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true);
+        mapperService.merge(parentType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(parentType, "nested_field", random().nextBoolean() ? "type=nested" : "type=object").string()), true, false);
+        mapperService.merge(childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType, CHILD_SCORE_NAME, "type=double,doc_values=false").string()), true, false);
         return createSearchContext(indexService);
     }
 
@@ -185,16 +185,13 @@ public class RecoveryPercolatorTests extends ElasticsearchIntegrationTest {
 
         logger.info("--> Add dummy docs");
         client().prepareIndex("test", "type1", "1").setSource("field1", 0).get();
-        client().prepareIndex("test", "type2", "1").setSource("field1", "0").get();
+        client().prepareIndex("test", "type2", "1").setSource("field1", 1).get();
 
         logger.info("--> register a queries");
         for (int i = 1; i <= 100; i++) {
             client().prepareIndex("test", PercolatorService.TYPE_NAME, Integer.toString(i))
                     .setSource(jsonBuilder().startObject()
                             .field("query", rangeQuery("field1").from(0).to(i))
-                            // The type must be set now, because two fields with the same name exist in different types.
-                            // Setting the type to `type1`, makes sure that the range query gets parsed to a Lucene NumericRangeQuery.
-                            .field("type", "type1")
                             .endObject())
                     .get();
         }
@@ -117,7 +117,7 @@ public class NestedAggregatorTest extends ElasticsearchSingleNodeTest {
         IndexSearcher searcher = new IndexSearcher(directoryReader);
 
         IndexService indexService = createIndex("test");
-        indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true);
+        indexService.mapperService().merge("test", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("test", "nested_field", "type=nested").string()), true, false);
         SearchContext searchContext = createSearchContext(indexService);
         AggregationContext context = new AggregationContext(searchContext);
 
@@ -790,7 +790,7 @@ public class ChildQuerySearchTests extends ElasticsearchIntegrationTest {
         client().prepareIndex("test", "child", "2").setParent("1").setSource("c_field", 1).get();
         client().admin().indices().prepareFlush("test").get();
 
-        client().prepareIndex("test", "type1", "3").setSource("p_field", "p_value1").get();
+        client().prepareIndex("test", "type1", "3").setSource("p_field", 2).get();
         client().admin().indices().prepareFlush("test").get();
 
         SearchResponse searchResponse = client().prepareSearch("test")
@@ -1163,7 +1163,7 @@ public class ChildQuerySearchTests extends ElasticsearchIntegrationTest {
                 .addMapping("child1"));
         ensureGreen();
 
-        client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1", "_parent", "bla").get();
+        client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
         try {
             client().prepareIndex("test", "child1", "c1").setParent("p1").setSource("c_field", "blue").get();
             fail();
@@ -151,7 +151,8 @@ public class ParentFieldLoadingBwcTest extends ElasticsearchIntegrationTest {
         assertAcked(prepareCreate("test")
                 .setSettings(indexSettings)
                 .addMapping("parent")
-                .addMapping("child", childMapping(MappedFieldType.Loading.LAZY)));
+                .addMapping("child", childMapping(MappedFieldType.Loading.LAZY))
+                .setUpdateAllTypes(true));
         ensureGreen();
 
         client().prepareIndex("test", "parent", "1").setSource("{}").get();
@@ -166,7 +167,8 @@ public class ParentFieldLoadingBwcTest extends ElasticsearchIntegrationTest {
         assertAcked(prepareCreate("test")
                 .setSettings(indexSettings)
                 .addMapping("parent")
-                .addMapping("child", "_parent", "type=parent"));
+                .addMapping("child", "_parent", "type=parent")
+                .setUpdateAllTypes(true));
         ensureGreen();
 
         client().prepareIndex("test", "parent", "1").setSource("{}").get();
@@ -182,7 +184,8 @@ public class ParentFieldLoadingBwcTest extends ElasticsearchIntegrationTest {
         assertAcked(prepareCreate("test")
                 .setSettings(indexSettings)
                 .addMapping("parent")
-                .addMapping("child", childMapping(MappedFieldType.Loading.EAGER)));
+                .addMapping("child", childMapping(MappedFieldType.Loading.EAGER))
+                .setUpdateAllTypes(true));
         ensureGreen();
 
         client().prepareIndex("test", "parent", "1").setSource("{}").get();
@@ -195,9 +198,10 @@ public class ParentFieldLoadingBwcTest extends ElasticsearchIntegrationTest {
         logger.info("testing eager global ordinals loading...");
         assertAcked(client().admin().indices().prepareDelete("test").get());
         assertAcked(prepareCreate("test")
-                .setSettings(indexSettings)
-                .addMapping("parent")
-                .addMapping("child", childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS)));
+                .setSettings(indexSettings)
+                .addMapping("parent")
+                .addMapping("child", childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))
+                .setUpdateAllTypes(true));
         ensureGreen();
 
         // Need to do 2 separate refreshes, otherwise we have 1 segment and then we can't measure if global ordinals
@@ -229,6 +233,7 @@ public class ParentFieldLoadingBwcTest extends ElasticsearchIntegrationTest {
 
         PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child")
                 .setSource(childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))
+                .setUpdateAllTypes(true)
                 .get();
         assertAcked(putMappingResponse);
         assertBusy(new Runnable() {
@@ -43,8 +43,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 
-/**
- */
 public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest {
 
     private final Settings indexSettings = Settings.builder()
@@ -55,7 +53,6 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest {
             .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false)
             .build();
 
-    @Test
     public void testEagerParentFieldLoading() throws Exception {
         logger.info("testing lazy loading...");
         assertAcked(prepareCreate("test")
@@ -120,7 +117,6 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest {
         assertThat(response.getIndicesStats().getFieldData().getMemorySizeInBytes(), greaterThan(0l));
     }
 
-    @Test
     public void testChangingEagerParentFieldLoadingAtRuntime() throws Exception {
         assertAcked(prepareCreate("test")
                 .setSettings(indexSettings)
@@ -137,6 +133,7 @@ public class ParentFieldLoadingTest extends ElasticsearchIntegrationTest {
 
         PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child")
                 .setSource(childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))
+                .setUpdateAllTypes(true)
                 .get();
         assertAcked(putMappingResponse);
         assertBusy(new Runnable() {
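
On the integration-test side the flag travels with the request: both index-creation and put-mapping builders gain setUpdateAllTypes(true), the Java-client counterpart of the new update_all_types request parameter. Condensed from the ParentFieldLoading hunks above:

    // Create the index with a child mapping whose _parent field settings
    // span multiple types.
    assertAcked(prepareCreate("test")
            .setSettings(indexSettings)
            .addMapping("parent")
            .addMapping("child", childMapping(MappedFieldType.Loading.LAZY))
            .setUpdateAllTypes(true));

    // Later, update the child mapping; the same flag opts the request in.
    PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test")
            .setType("child")
            .setSource(childMapping(MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS))
            .setUpdateAllTypes(true)
            .get();
    assertAcked(putMappingResponse);
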
@@ -634,7 +634,8 @@ public class SearchQueryTests extends ElasticsearchIntegrationTest {
                         .endObject().endObject())
                 .addMapping("type2", jsonBuilder().startObject().startObject("type2")
                         .startObject("_type").field("index", index).endObject()
-                        .endObject().endObject()));
+                        .endObject().endObject())
+                .setUpdateAllTypes(true));
         indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("field1", "value1"),
                 client().prepareIndex("test", "type2", "1").setSource("field1", "value1"),
                 client().prepareIndex("test", "type1", "2").setSource("field1", "value1"),