Mappings: Validate dynamic mappings updates on the master node.

This commit changes dynamic mappings updates so that they are synchronous on the
entire cluster and their validity is checked by the master node. There are some
important consequences of this commit:
 - a failing index request on a non-existing type does not implicitly create
   the type anymore
 - dynamic mappings updates cannot create inconsistent mappings on different
   shards
 - indexing requests that introduce new fields might induce latency spikes
   because of the overhead to update the mappings on the master node

Close #8688
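
The core mechanism is a synchronous wait on the primary: before executing the engine operation, TransportIndexAction and TransportShardBulkAction now push the dynamic mapping update to the master and block until it has been validated and published, bounded by the new dynamic indices.mapping.dynamic_timeout setting (30s by default). A sketch of that wait, condensed from the MappingUpdatedAction diff below:

    public void updateMappingOnMasterSynchronously(String index, String indexUUID, String type,
                                                   Mapping mappingUpdate, TimeValue timeout) throws Throwable {
        final CountDownLatch latch = new CountDownLatch(1);
        final Throwable[] cause = new Throwable[1];
        updateMappingOnMaster(index, indexUUID, type, mappingUpdate, new MappingUpdateListener() {
            @Override
            public void onMappingUpdate() {
                latch.countDown(); // master accepted and published the update
            }
            @Override
            public void onFailure(Throwable t) {
                cause[0] = t; // master rejected the update, e.g. a mapping conflict
                latch.countDown();
            }
        });
        if (!latch.await(timeout.getMillis(), TimeUnit.MILLISECONDS)) {
            throw new TimeoutException("Time out while waiting for the master node to validate a mapping update for type [" + type + "]");
        }
        if (cause[0] != null) {
            throw cause[0]; // fail the indexing request instead of diverging across shards
        }
    }

The river index is the one exception: as the diffs note, it keeps the old behaviour of applying the update locally first and notifying the master asynchronously, until rivers are removed.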
Adrien Grand 2015-04-16 18:16:00 +02:00
parent ca03e40605
commit 1adf232bb2
55 changed files with 1119 additions and 1057 deletions


@@ -12,7 +12,7 @@
indices.get_mapping:
index: test_index
- match: { test_index.mappings.type_1.properties: {}}
- match: { test_index.mappings.type_1: {}}
---
"Create index with settings":
@@ -106,7 +106,7 @@
indices.get_mapping:
index: test_index
- match: { test_index.mappings.type_1.properties: {}}
- match: { test_index.mappings.type_1: {}}
- do:
indices.get_settings:


@@ -21,10 +21,10 @@ setup:
- do:
indices.get_mapping: {}
- match: { test_1.mappings.type_1.properties: {}}
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_2.mappings.type_2.properties: {}}
- match: { test_2.mappings.type_3.properties: {}}
- match: { test_1.mappings.type_1: {}}
- match: { test_1.mappings.type_2: {}}
- match: { test_2.mappings.type_2: {}}
- match: { test_2.mappings.type_3: {}}
---
"Get /{index}/_mapping":
@@ -33,8 +33,8 @@ setup:
indices.get_mapping:
index: test_1
- match: { test_1.mappings.type_1.properties: {}}
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_1: {}}
- match: { test_1.mappings.type_2: {}}
- is_false: test_2
@@ -46,8 +46,8 @@ setup:
index: test_1
type: _all
- match: { test_1.mappings.type_1.properties: {}}
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_1: {}}
- match: { test_1.mappings.type_2: {}}
- is_false: test_2
---
@@ -58,8 +58,8 @@ setup:
index: test_1
type: '*'
- match: { test_1.mappings.type_1.properties: {}}
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_1: {}}
- match: { test_1.mappings.type_2: {}}
- is_false: test_2
---
@@ -70,7 +70,7 @@ setup:
index: test_1
type: type_1
- match: { test_1.mappings.type_1.properties: {}}
- match: { test_1.mappings.type_1: {}}
- is_false: test_1.mappings.type_2
- is_false: test_2
@@ -82,8 +82,8 @@ setup:
index: test_1
type: type_1,type_2
- match: { test_1.mappings.type_1.properties: {}}
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_1: {}}
- match: { test_1.mappings.type_2: {}}
- is_false: test_2
---
@@ -94,7 +94,7 @@ setup:
index: test_1
type: '*2'
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_2: {}}
- is_false: test_1.mappings.type_1
- is_false: test_2
@@ -105,8 +105,8 @@ setup:
indices.get_mapping:
type: type_2
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_2.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_2: {}}
- match: { test_2.mappings.type_2: {}}
- is_false: test_1.mappings.type_1
- is_false: test_2.mappings.type_3
@@ -118,8 +118,8 @@ setup:
index: _all
type: type_2
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_2.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_2: {}}
- match: { test_2.mappings.type_2: {}}
- is_false: test_1.mappings.type_1
- is_false: test_2.mappings.type_3
@@ -131,8 +131,8 @@ setup:
index: '*'
type: type_2
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_2.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_2: {}}
- match: { test_2.mappings.type_2: {}}
- is_false: test_1.mappings.type_1
- is_false: test_2.mappings.type_3
@@ -144,8 +144,8 @@ setup:
index: test_1,test_2
type: type_2
- match: { test_1.mappings.type_2.properties: {}}
- match: { test_2.mappings.type_2.properties: {}}
- match: { test_1.mappings.type_2: {}}
- match: { test_2.mappings.type_2: {}}
- is_false: test_2.mappings.type_3
---
@@ -156,6 +156,6 @@ setup:
index: '*2'
type: type_2
- match: { test_2.mappings.type_2.properties: {}}
- match: { test_2.mappings.type_2: {}}
- is_false: test_1
- is_false: test_2.mappings.type_3


@@ -56,8 +56,8 @@ setup:
indices.get_mapping:
index: test-x*
- match: { test-xxx.mappings.type_1.properties: {}}
- match: { test-xxy.mappings.type_2.properties: {}}
- match: { test-xxx.mappings.type_1: {}}
- match: { test-xxy.mappings.type_2: {}}
---
"Get test-* with wildcard_expansion=all":
@@ -67,9 +67,9 @@ setup:
index: test-x*
expand_wildcards: all
- match: { test-xxx.mappings.type_1.properties: {}}
- match: { test-xxy.mappings.type_2.properties: {}}
- match: { test-xyy.mappings.type_3.properties: {}}
- match: { test-xxx.mappings.type_1: {}}
- match: { test-xxy.mappings.type_2: {}}
- match: { test-xyy.mappings.type_3: {}}
---
"Get test-* with wildcard_expansion=open":
@@ -79,8 +79,8 @@ setup:
index: test-x*
expand_wildcards: open
- match: { test-xxx.mappings.type_1.properties: {}}
- match: { test-xxy.mappings.type_2.properties: {}}
- match: { test-xxx.mappings.type_1: {}}
- match: { test-xxy.mappings.type_2: {}}
---
"Get test-* with wildcard_expansion=closed":
@@ -90,7 +90,7 @@ setup:
index: test-x*
expand_wildcards: closed
- match: { test-xyy.mappings.type_3.properties: {}}
- match: { test-xyy.mappings.type_3: {}}
---
"Get test-* with wildcard_expansion=none":
@@ -110,8 +110,8 @@ setup:
index: test-x*
expand_wildcards: open,closed
- match: { test-xxx.mappings.type_1.properties: {}}
- match: { test-xxy.mappings.type_2.properties: {}}
- match: { test-xyy.mappings.type_3.properties: {}}
- match: { test-xxx.mappings.type_1: {}}
- match: { test-xxy.mappings.type_2: {}}
- match: { test-xyy.mappings.type_3: {}}


@@ -1,40 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchWrapperException;
import org.elasticsearch.common.Nullable;
public class WriteFailureException extends ElasticsearchException implements ElasticsearchWrapperException {
@Nullable
private final String mappingTypeToUpdate;
public WriteFailureException(Throwable cause, String mappingTypeToUpdate) {
super(null, cause);
assert cause != null;
this.mappingTypeToUpdate = mappingTypeToUpdate;
}
public String getMappingTypeToUpdate() {
return mappingTypeToUpdate;
}
}


@@ -19,14 +19,12 @@
package org.elasticsearch.action.bulk;
import com.google.common.collect.Sets;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionWriteResponse;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.WriteFailureException;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexRequest;
@@ -44,26 +42,27 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.DocumentAlreadyExistsException;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.river.RiverIndexName;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
import java.util.Map;
import java.util.Set;
/**
* Performs the index operation.
@@ -134,7 +133,6 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
final BulkShardRequest request = shardRequest.request;
IndexService indexService = indicesService.indexServiceSafe(request.index());
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId.id());
final Set<String> mappingTypesToUpdate = Sets.newHashSet();
long[] preVersions = new long[request.items().length];
VersionType[] preVersionTypes = new VersionType[request.items().length];
@@ -145,20 +143,10 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
preVersions[requestIndex] = indexRequest.version();
preVersionTypes[requestIndex] = indexRequest.versionType();
try {
try {
WriteResult result = shardIndexOperation(request, indexRequest, clusterState, indexShard, true);
// add the response
IndexResponse indexResponse = result.response();
setResponse(item, new BulkItemResponse(item.id(), indexRequest.opType().lowercase(), indexResponse));
if (result.mappingTypeToUpdate != null) {
mappingTypesToUpdate.add(result.mappingTypeToUpdate);
}
} catch (WriteFailureException e) {
if (e.getMappingTypeToUpdate() != null) {
mappingTypesToUpdate.add(e.getMappingTypeToUpdate());
}
throw e.getCause();
}
WriteResult result = shardIndexOperation(request, indexRequest, clusterState, indexShard, indexService, true);
// add the response
IndexResponse indexResponse = result.response();
setResponse(item, new BulkItemResponse(item.id(), indexRequest.opType().lowercase(), indexResponse));
} catch (Throwable e) {
// rethrow the failure if we are going to retry on primary and let parent failure to handle it
if (retryPrimaryException(e)) {
@@ -166,12 +154,6 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
for (int j = 0; j < requestIndex; j++) {
applyVersion(request.items()[j], preVersions[j], preVersionTypes[j]);
}
for (String mappingTypeToUpdate : mappingTypesToUpdate) {
DocumentMapper docMapper = indexService.mapperService().documentMapper(mappingTypeToUpdate);
if (docMapper != null) {
mappingUpdatedAction.updateMappingOnMaster(indexService.index().name(), docMapper, indexService.indexUUID());
}
}
throw (ElasticsearchException) e;
}
if (e instanceof ElasticsearchException && ((ElasticsearchException) e).status() == RestStatus.CONFLICT) {
@@ -230,7 +212,7 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
for (int updateAttemptsCount = 0; updateAttemptsCount <= updateRequest.retryOnConflict(); updateAttemptsCount++) {
UpdateResult updateResult;
try {
updateResult = shardUpdateOperation(clusterState, request, updateRequest, indexShard);
updateResult = shardUpdateOperation(clusterState, request, updateRequest, indexShard, indexService);
} catch (Throwable t) {
updateResult = new UpdateResult(null, null, false, t, null);
}
@@ -250,9 +232,6 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
}
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), indexRequest);
setResponse(item, new BulkItemResponse(item.id(), OP_TYPE_UPDATE, updateResponse));
if (result.mappingTypeToUpdate != null) {
mappingTypesToUpdate.add(result.mappingTypeToUpdate);
}
break;
case DELETE:
DeleteResponse response = updateResult.writeResult.response();
@@ -331,13 +310,6 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
assert preVersionTypes[requestIndex] != null;
}
for (String mappingTypToUpdate : mappingTypesToUpdate) {
DocumentMapper docMapper = indexService.mapperService().documentMapper(mappingTypToUpdate);
if (docMapper != null) {
mappingUpdatedAction.updateMappingOnMaster(indexService.index().name(), docMapper, indexService.indexUUID());
}
}
if (request.refresh()) {
try {
indexShard.refresh("refresh_flag_bulk");
@@ -363,12 +335,10 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
static class WriteResult {
final ActionWriteResponse response;
final String mappingTypeToUpdate;
final Engine.IndexingOperation op;
WriteResult(ActionWriteResponse response, String mappingTypeToUpdate, Engine.IndexingOperation op) {
WriteResult(ActionWriteResponse response, Engine.IndexingOperation op) {
this.response = response;
this.mappingTypeToUpdate = mappingTypeToUpdate;
this.op = op;
}
@@ -382,8 +352,25 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
}
private void applyMappingUpdate(IndexService indexService, String type, Mapping update) throws Throwable {
// HACK: Rivers seem to have something specific that triggers potential
// deadlocks when doing concurrent indexing. So for now they keep the
// old behaviour of updating mappings locally first and then
// asynchronously notifying the master
// this can go away when rivers are removed
final String indexName = indexService.index().name();
final String indexUUID = indexService.indexUUID();
if (indexName.equals(RiverIndexName.Conf.indexName(settings))) {
indexService.mapperService().merge(type, new CompressedString(update.toBytes()), true);
mappingUpdatedAction.updateMappingOnMaster(indexName, indexUUID, type, update, null);
} else {
mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, indexUUID, type, update);
indexService.mapperService().merge(type, new CompressedString(update.toBytes()), true);
}
}
private WriteResult shardIndexOperation(BulkShardRequest request, IndexRequest indexRequest, ClusterState clusterState,
IndexShard indexShard, boolean processed) {
IndexShard indexShard, IndexService indexService, boolean processed) throws Throwable {
// validate, if routing is required, that we got routing
MappingMetaData mappingMd = clusterState.metaData().index(request.index()).mappingOrDefault(indexRequest.type());
@@ -400,45 +387,38 @@ public class TransportShardBulkAction extends TransportShardReplicationOperation
SourceToParse sourceToParse = SourceToParse.source(SourceToParse.Origin.PRIMARY, indexRequest.source()).type(indexRequest.type()).id(indexRequest.id())
.routing(indexRequest.routing()).parent(indexRequest.parent()).timestamp(indexRequest.timestamp()).ttl(indexRequest.ttl());
// update mapping on master if needed, we won't update changes to the same type, since once its changed, it won't have mappers added
String mappingTypeToUpdate = null;
long version;
boolean created;
Engine.IndexingOperation op;
try {
if (indexRequest.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse, indexRequest.version(), indexRequest.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates() || indexRequest.canHaveDuplicates());
if (index.parsedDoc().mappingsModified()) {
mappingTypeToUpdate = indexRequest.type();
}
indexShard.index(index);
version = index.version();
op = index;
created = index.created();
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse, indexRequest.version(), indexRequest.versionType(), Engine.Operation.Origin.PRIMARY,
request.canHaveDuplicates() || indexRequest.canHaveDuplicates(), indexRequest.autoGeneratedId());
if (create.parsedDoc().mappingsModified()) {
mappingTypeToUpdate = indexRequest.type();
}
indexShard.create(create);
version = create.version();
op = create;
created = true;
if (indexRequest.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse, indexRequest.version(), indexRequest.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates() || indexRequest.canHaveDuplicates());
if (index.parsedDoc().dynamicMappingsUpdate() != null) {
applyMappingUpdate(indexService, indexRequest.type(), index.parsedDoc().dynamicMappingsUpdate());
}
// update the version on request so it will happen on the replicas
indexRequest.versionType(indexRequest.versionType().versionTypeForReplicationAndRecovery());
indexRequest.version(version);
} catch (Throwable t) {
throw new WriteFailureException(t, mappingTypeToUpdate);
indexShard.index(index);
version = index.version();
op = index;
created = index.created();
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse, indexRequest.version(), indexRequest.versionType(), Engine.Operation.Origin.PRIMARY,
request.canHaveDuplicates() || indexRequest.canHaveDuplicates(), indexRequest.autoGeneratedId());
if (create.parsedDoc().dynamicMappingsUpdate() != null) {
applyMappingUpdate(indexService, indexRequest.type(), create.parsedDoc().dynamicMappingsUpdate());
}
indexShard.create(create);
version = create.version();
op = create;
created = true;
}
// update the version on request so it will happen on the replicas
indexRequest.versionType(indexRequest.versionType().versionTypeForReplicationAndRecovery());
indexRequest.version(version);
assert indexRequest.versionType().validateVersionForWrites(indexRequest.version());
IndexResponse indexResponse = new IndexResponse(request.index(), indexRequest.type(), indexRequest.id(), version, created);
return new WriteResult(indexResponse, mappingTypeToUpdate, op);
return new WriteResult(indexResponse, op);
}
private WriteResult shardDeleteOperation(BulkShardRequest request, DeleteRequest deleteRequest, IndexShard indexShard) {
@@ -451,7 +431,7 @@
assert deleteRequest.versionType().validateVersionForWrites(deleteRequest.version());
DeleteResponse deleteResponse = new DeleteResponse(request.index(), deleteRequest.type(), deleteRequest.id(), delete.version(), delete.found());
return new WriteResult(deleteResponse, null, null);
return new WriteResult(deleteResponse, null);
}
static class UpdateResult {
@@ -507,14 +487,14 @@
}
private UpdateResult shardUpdateOperation(ClusterState clusterState, BulkShardRequest bulkShardRequest, UpdateRequest updateRequest, IndexShard indexShard) {
private UpdateResult shardUpdateOperation(ClusterState clusterState, BulkShardRequest bulkShardRequest, UpdateRequest updateRequest, IndexShard indexShard, IndexService indexService) {
UpdateHelper.Result translate = updateHelper.prepare(updateRequest, indexShard);
switch (translate.operation()) {
case UPSERT:
case INDEX:
IndexRequest indexRequest = translate.action();
try {
WriteResult result = shardIndexOperation(bulkShardRequest, indexRequest, clusterState, indexShard, false);
WriteResult result = shardIndexOperation(bulkShardRequest, indexRequest, clusterState, indexShard, indexService, false);
return new UpdateResult(translate, indexRequest, result);
} catch (Throwable t) {
t = ExceptionsHelper.unwrapCause(t);


@@ -22,7 +22,6 @@ package org.elasticsearch.action.index;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.WriteFailureException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
@@ -38,15 +37,17 @@ import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.river.RiverIndexName;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@@ -166,6 +167,23 @@ public class TransportIndexAction extends TransportShardReplicationOperationActi
.indexShards(clusterService.state(), request.concreteIndex(), request.request().type(), request.request().id(), request.request().routing());
}
private void applyMappingUpdate(IndexService indexService, String type, Mapping update) throws Throwable {
// HACK: Rivers seem to have something specific that triggers potential
// deadlocks when doing concurrent indexing. So for now they keep the
// old behaviour of updating mappings locally first and then
// asynchronously notifying the master
// this can go away when rivers are removed
final String indexName = indexService.index().name();
final String indexUUID = indexService.indexUUID();
if (indexName.equals(RiverIndexName.Conf.indexName(settings))) {
indexService.mapperService().merge(type, new CompressedString(update.toBytes()), true);
mappingUpdatedAction.updateMappingOnMaster(indexName, indexUUID, type, update, null);
} else {
mappingUpdatedAction.updateMappingOnMasterSynchronously(indexName, indexUUID, type, update);
indexService.mapperService().merge(type, new CompressedString(update.toBytes()), true);
}
}
@Override
protected Tuple<IndexResponse, IndexRequest> shardOperationOnPrimary(ClusterState clusterState, PrimaryOperationRequest shardRequest) throws Throwable {
final IndexRequest request = shardRequest.request;
@@ -186,48 +204,38 @@ public class TransportIndexAction extends TransportShardReplicationOperationActi
long version;
boolean created;
try {
if (request.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse, request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates());
if (index.parsedDoc().mappingsModified()) {
mappingUpdatedAction.updateMappingOnMaster(shardRequest.shardId.getIndex(), index.docMapper(), indexService.indexUUID());
}
indexShard.index(index);
version = index.version();
created = index.created();
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse,
request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates(), request.autoGeneratedId());
if (create.parsedDoc().mappingsModified()) {
mappingUpdatedAction.updateMappingOnMaster(shardRequest.shardId.getIndex(), create.docMapper(), indexService.indexUUID());
}
indexShard.create(create);
version = create.version();
created = true;
if (request.opType() == IndexRequest.OpType.INDEX) {
Engine.Index index = indexShard.prepareIndex(sourceToParse, request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates());
if (index.parsedDoc().dynamicMappingsUpdate() != null) {
applyMappingUpdate(indexService, request.type(), index.parsedDoc().dynamicMappingsUpdate());
}
if (request.refresh()) {
try {
indexShard.refresh("refresh_flag_index");
} catch (Throwable e) {
// ignore
}
indexShard.index(index);
version = index.version();
created = index.created();
} else {
Engine.Create create = indexShard.prepareCreate(sourceToParse,
request.version(), request.versionType(), Engine.Operation.Origin.PRIMARY, request.canHaveDuplicates(), request.autoGeneratedId());
if (create.parsedDoc().dynamicMappingsUpdate() != null) {
applyMappingUpdate(indexService, request.type(), create.parsedDoc().dynamicMappingsUpdate());
}
// update the version on the request, so it will be used for the replicas
request.version(version);
request.versionType(request.versionType().versionTypeForReplicationAndRecovery());
assert request.versionType().validateVersionForWrites(request.version());
return new Tuple<>(new IndexResponse(shardRequest.shardId.getIndex(), request.type(), request.id(), version, created), shardRequest.request);
} catch (WriteFailureException e) {
if (e.getMappingTypeToUpdate() != null){
DocumentMapper docMapper = indexService.mapperService().documentMapper(e.getMappingTypeToUpdate());
if (docMapper != null) {
mappingUpdatedAction.updateMappingOnMaster(indexService.index().name(), docMapper, indexService.indexUUID());
}
}
throw e.getCause();
indexShard.create(create);
version = create.version();
created = true;
}
if (request.refresh()) {
try {
indexShard.refresh("refresh_flag_index");
} catch (Throwable e) {
// ignore
}
}
// update the version on the request, so it will be used for the replicas
request.version(version);
request.versionType(request.versionType().versionTypeForReplicationAndRecovery());
assert request.versionType().validateVersionForWrites(request.version());
return new Tuple<>(new IndexResponse(shardRequest.shardId.getIndex(), request.type(), request.id(), version, created), shardRequest.request);
}
@Override


@@ -19,9 +19,10 @@
package org.elasticsearch.cluster.action.index;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
@@ -37,7 +38,6 @@ import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaDataMappingService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -46,20 +46,20 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.TimeoutException;
/**
* Called by shards in the cluster when their mapping was dynamically updated and it needs to be updated
@@ -67,24 +67,23 @@ import java.util.concurrent.atomic.AtomicLong;
*/
public class MappingUpdatedAction extends TransportMasterNodeOperationAction<MappingUpdatedAction.MappingUpdatedRequest, MappingUpdatedAction.MappingUpdatedResponse> {
public static final String INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME = "indices.mapping.additional_mapping_change_time";
public static final String INDICES_MAPPING_DYNAMIC_TIMEOUT = "indices.mapping.dynamic_timeout";
public static final String ACTION_NAME = "internal:cluster/mapping_updated";
private final AtomicLong mappingUpdateOrderGen = new AtomicLong();
private final MetaDataMappingService metaDataMappingService;
private volatile MasterMappingUpdater masterMappingUpdater;
private volatile TimeValue additionalMappingChangeTime;
private volatile TimeValue dynamicMappingUpdateTimeout;
class ApplySettings implements NodeSettingsService.Listener {
@Override
public void onRefreshSettings(Settings settings) {
final TimeValue current = MappingUpdatedAction.this.additionalMappingChangeTime;
final TimeValue newValue = settings.getAsTime(INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME, current);
TimeValue current = MappingUpdatedAction.this.dynamicMappingUpdateTimeout;
TimeValue newValue = settings.getAsTime(INDICES_MAPPING_DYNAMIC_TIMEOUT, current);
if (!current.equals(newValue)) {
logger.info("updating " + INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME + " from [{}] to [{}]", current, newValue);
MappingUpdatedAction.this.additionalMappingChangeTime = newValue;
logger.info("updating " + INDICES_MAPPING_DYNAMIC_TIMEOUT + " from [{}] to [{}]", current, newValue);
MappingUpdatedAction.this.dynamicMappingUpdateTimeout = newValue;
}
}
}
@@ -94,8 +93,7 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
MetaDataMappingService metaDataMappingService, NodeSettingsService nodeSettingsService, ActionFilters actionFilters) {
super(settings, ACTION_NAME, transportService, clusterService, threadPool, actionFilters);
this.metaDataMappingService = metaDataMappingService;
// this setting should probably always be 0, just add the option to wait for more changes within a time window
this.additionalMappingChangeTime = settings.getAsTime(INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME, TimeValue.timeValueMillis(0));
this.dynamicMappingUpdateTimeout = settings.getAsTime(INDICES_MAPPING_DYNAMIC_TIMEOUT, TimeValue.timeValueSeconds(30));
nodeSettingsService.addListener(new ApplySettings());
}
@@ -109,13 +107,58 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
this.masterMappingUpdater = null;
}
public void updateMappingOnMaster(String index, DocumentMapper documentMapper, String indexUUID) {
updateMappingOnMaster(index, documentMapper, indexUUID, null);
public void updateMappingOnMaster(String index, String indexUUID, String type, Mapping mappingUpdate, MappingUpdateListener listener) {
if (type.equals(MapperService.DEFAULT_MAPPING)) {
throw new ElasticsearchIllegalArgumentException("_default_ mapping should not be updated");
}
try {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
mappingUpdate.toXContent(builder, new ToXContent.MapParams(ImmutableMap.<String, String>of()));
final CompressedString mappingSource = new CompressedString(builder.endObject().bytes());
masterMappingUpdater.add(new MappingChange(index, indexUUID, type, mappingSource, listener));
} catch (IOException bogus) {
throw new AssertionError("Cannot happen", bogus);
}
}
public void updateMappingOnMaster(String index, DocumentMapper documentMapper, String indexUUID, MappingUpdateListener listener) {
assert !documentMapper.type().equals(MapperService.DEFAULT_MAPPING) : "_default_ mapping should not be updated";
masterMappingUpdater.add(new MappingChange(documentMapper, index, indexUUID, listener));
/**
* Same as {@link #updateMappingOnMasterSynchronously(String, String, String, Mapping, TimeValue)}
* using the default timeout.
*/
public void updateMappingOnMasterSynchronously(String index, String indexUUID, String type, Mapping mappingUpdate) throws Throwable {
updateMappingOnMasterSynchronously(index, indexUUID, type, mappingUpdate, dynamicMappingUpdateTimeout);
}
/**
* Update mappings synchronously on the master node, waiting for at most
* {@code timeout}. When this method returns successfully mappings have
* been applied to the master node and propagated to data nodes.
*/
public void updateMappingOnMasterSynchronously(String index, String indexUUID, String type, Mapping mappingUpdate, TimeValue timeout) throws Throwable {
final CountDownLatch latch = new CountDownLatch(1);
final Throwable[] cause = new Throwable[1];
final MappingUpdateListener listener = new MappingUpdateListener() {
@Override
public void onMappingUpdate() {
latch.countDown();
}
@Override
public void onFailure(Throwable t) {
cause[0] = t;
latch.countDown();
}
};
updateMappingOnMaster(index, indexUUID, type, mappingUpdate, listener);
if (!latch.await(timeout.getMillis(), TimeUnit.MILLISECONDS)) {
throw new TimeoutException("Time out while waiting for the master node to validate a mapping update for type [" + type + "]");
}
if (cause[0] != null) {
throw cause[0];
}
}
@Override
@@ -142,7 +185,7 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
@Override
protected void masterOperation(final MappingUpdatedRequest request, final ClusterState state, final ActionListener<MappingUpdatedResponse> listener) throws ElasticsearchException {
metaDataMappingService.updateMapping(request.index(), request.indexUUID(), request.type(), request.mappingSource(), request.order, request.nodeId, new ActionListener<ClusterStateUpdateResponse>() {
metaDataMappingService.updateMapping(request.index(), request.indexUUID(), request.type(), request.mappingSource(), request.nodeId, new ActionListener<ClusterStateUpdateResponse>() {
@Override
public void onResponse(ClusterStateUpdateResponse response) {
listener.onResponse(new MappingUpdatedResponse());
@@ -174,18 +217,16 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
private String indexUUID = IndexMetaData.INDEX_UUID_NA_VALUE;
private String type;
private CompressedString mappingSource;
private long order = -1; // -1 means not set...
private String nodeId = null; // null means not set
MappingUpdatedRequest() {
}
public MappingUpdatedRequest(String index, String indexUUID, String type, CompressedString mappingSource, long order, String nodeId) {
public MappingUpdatedRequest(String index, String indexUUID, String type, CompressedString mappingSource, String nodeId) {
this.index = index;
this.indexUUID = indexUUID;
this.type = type;
this.mappingSource = mappingSource;
this.order = order;
this.nodeId = nodeId;
}
@@ -215,13 +256,6 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
return mappingSource;
}
/**
* Returns -1 if not set...
*/
public long order() {
return this.order;
}
/**
* Returns null for not set.
*/
@@ -241,7 +275,6 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
type = in.readString();
mappingSource = CompressedString.readCompressedString(in);
indexUUID = in.readString();
order = in.readLong();
nodeId = in.readOptionalString();
}
@@ -252,7 +285,6 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
out.writeString(type);
mappingSource.writeTo(out);
out.writeString(indexUUID);
out.writeLong(order);
out.writeOptionalString(nodeId);
}
@@ -263,15 +295,17 @@ public class MappingUpdatedAction extends TransportMasterNodeOperationAction<Map
}
private static class MappingChange {
public final DocumentMapper documentMapper;
public final String index;
public final String indexUUID;
public final String type;
public final CompressedString mappingSource;
public final MappingUpdateListener listener;
MappingChange(DocumentMapper documentMapper, String index, String indexUUID, MappingUpdateListener listener) {
this.documentMapper = documentMapper;
MappingChange(String index, String indexUUID, String type, CompressedString mappingSource, MappingUpdateListener listener) {
this.index = index;
this.indexUUID = indexUUID;
this.type = type;
this.mappingSource = mappingSource;
this.listener = listener;
}
}
@@ -313,142 +347,59 @@
this.interrupt();
}
class UpdateKey {
public final String indexUUID;
public final String type;
UpdateKey(String indexUUID, String type) {
this.indexUUID = indexUUID;
this.type = type;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UpdateKey updateKey = (UpdateKey) o;
if (!indexUUID.equals(updateKey.indexUUID)) {
return false;
}
if (!type.equals(updateKey.type)) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = indexUUID.hashCode();
result = 31 * result + type.hashCode();
return result;
}
}
class UpdateValue {
public final MappingChange mainChange;
public final List<MappingUpdateListener> listeners = Lists.newArrayList();
UpdateValue(MappingChange mainChange) {
this.mainChange = mainChange;
}
public void notifyListeners(@Nullable Throwable t) {
for (MappingUpdateListener listener : listeners) {
try {
if (t == null) {
listener.onMappingUpdate();
} else {
listener.onFailure(t);
}
} catch (Throwable lisFailure) {
logger.warn("unexpected failure on mapping update listener callback [{}]", lisFailure, listener);
}
}
}
}
@Override
public void run() {
Map<UpdateKey, UpdateValue> pendingUpdates = Maps.newHashMap();
while (running) {
MappingUpdateListener listener = null;
try {
MappingChange polledChange = queue.poll(10, TimeUnit.MINUTES);
if (polledChange == null) {
final MappingChange change = queue.poll(10, TimeUnit.MINUTES);
if (change == null) {
continue;
}
List<MappingChange> changes = Lists.newArrayList(polledChange);
if (additionalMappingChangeTime.millis() > 0) {
Thread.sleep(additionalMappingChangeTime.millis());
}
queue.drainTo(changes);
Collections.reverse(changes); // process then in newest one to oldest
// go over and add to pending updates map
for (MappingChange change : changes) {
UpdateKey key = new UpdateKey(change.indexUUID, change.documentMapper.type());
UpdateValue updateValue = pendingUpdates.get(key);
if (updateValue == null) {
updateValue = new UpdateValue(change);
pendingUpdates.put(key, updateValue);
}
listener = change.listener;
final MappingUpdatedAction.MappingUpdatedRequest mappingRequest;
try {
DiscoveryNode node = clusterService.localNode();
mappingRequest = new MappingUpdatedAction.MappingUpdatedRequest(
change.index, change.indexUUID, change.type, change.mappingSource, node != null ? node.id() : null
);
} catch (Throwable t) {
logger.warn("Failed to update master on updated mapping for index [" + change.index + "], type [" + change.type + "]", t);
if (change.listener != null) {
updateValue.listeners.add(change.listener);
change.listener.onFailure(t);
}
continue;
}
for (Iterator<UpdateValue> iterator = pendingUpdates.values().iterator(); iterator.hasNext(); ) {
final UpdateValue updateValue = iterator.next();
iterator.remove();
MappingChange change = updateValue.mainChange;
final MappingUpdatedAction.MappingUpdatedRequest mappingRequest;
try {
// we generate the order id before we get the mapping to send and refresh the source, so
// if 2 happen concurrently, we know that the later order will include the previous one
long orderId = mappingUpdateOrderGen.incrementAndGet();
change.documentMapper.refreshSource();
DiscoveryNode node = clusterService.localNode();
mappingRequest = new MappingUpdatedAction.MappingUpdatedRequest(
change.index, change.indexUUID, change.documentMapper.type(), change.documentMapper.mappingSource(), orderId, node != null ? node.id() : null
);
} catch (Throwable t) {
logger.warn("Failed to update master on updated mapping for index [" + change.index + "], type [" + change.documentMapper.type() + "]", t);
updateValue.notifyListeners(t);
continue;
logger.trace("sending mapping updated to master: {}", mappingRequest);
execute(mappingRequest, new ActionListener<MappingUpdatedAction.MappingUpdatedResponse>() {
@Override
public void onResponse(MappingUpdatedAction.MappingUpdatedResponse mappingUpdatedResponse) {
logger.debug("successfully updated master with mapping update: {}", mappingRequest);
if (change.listener != null) {
change.listener.onMappingUpdate();
}
}
logger.trace("sending mapping updated to master: {}", mappingRequest);
execute(mappingRequest, new ActionListener<MappingUpdatedAction.MappingUpdatedResponse>() {
@Override
public void onResponse(MappingUpdatedAction.MappingUpdatedResponse mappingUpdatedResponse) {
logger.debug("successfully updated master with mapping update: {}", mappingRequest);
updateValue.notifyListeners(null);
}
@Override
public void onFailure(Throwable e) {
logger.warn("failed to update master on updated mapping for {}", e, mappingRequest);
updateValue.notifyListeners(e);
@Override
public void onFailure(Throwable e) {
logger.warn("failed to update master on updated mapping for {}", e, mappingRequest);
if (change.listener != null) {
change.listener.onFailure(e);
}
});
}
}
});
} catch (Throwable t) {
if (listener != null) {
// even if the failure is expected, eg. if we got interrupted,
// we need to notify the listener as there might be a latch
// waiting for it to be called
listener.onFailure(t);
}
if (t instanceof InterruptedException && !running) {
// all is well, we are shutting down
} else {
logger.warn("failed to process mapping updates", t);
}
// cleanup all pending update callbacks that were not processed due to a global failure...
for (Iterator<Map.Entry<UpdateKey, UpdateValue>> iterator = pendingUpdates.entrySet().iterator(); iterator.hasNext(); ) {
Map.Entry<UpdateKey, UpdateValue> entry = iterator.next();
iterator.remove();
entry.getValue().notifyListeners(t);
logger.warn("failed to process mapping update", t);
}
}
}


@@ -43,9 +43,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidTypeNameException;
import org.elasticsearch.indices.TypeMissingException;
import org.elasticsearch.percolator.PercolatorService;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.*;
@@ -57,7 +55,6 @@ import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlag
*/
public class MetaDataMappingService extends AbstractComponent {
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final IndicesService indicesService;
@@ -68,9 +65,8 @@ public class MetaDataMappingService extends AbstractComponent {
private long refreshOrUpdateProcessedInsertOrder;
@Inject
public MetaDataMappingService(Settings settings, ThreadPool threadPool, ClusterService clusterService, IndicesService indicesService) {
public MetaDataMappingService(Settings settings, ClusterService clusterService, IndicesService indicesService) {
super(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
this.indicesService = indicesService;
}
@@ -97,15 +93,13 @@
static class UpdateTask extends MappingTask {
final String type;
final CompressedString mappingSource;
final long order; // -1 for unknown
final String nodeId; // null fr unknown
final ActionListener<ClusterStateUpdateResponse> listener;
UpdateTask(String index, String indexUUID, String type, CompressedString mappingSource, long order, String nodeId, ActionListener<ClusterStateUpdateResponse> listener) {
UpdateTask(String index, String indexUUID, String type, CompressedString mappingSource, String nodeId, ActionListener<ClusterStateUpdateResponse> listener) {
super(index, indexUUID);
this.type = type;
this.mappingSource = mappingSource;
this.order = order;
this.nodeId = nodeId;
this.listener = listener;
}
@@ -176,35 +170,7 @@
logger.debug("[{}] ignoring task [{}] - index meta data doesn't match task uuid", index, task);
continue;
}
boolean add = true;
// if its an update task, make sure we only process the latest ordered one per node
if (task instanceof UpdateTask) {
UpdateTask uTask = (UpdateTask) task;
// we can only do something to compare if we have the order && node
if (uTask.order != -1 && uTask.nodeId != null) {
for (int i = 0; i < tasks.size(); i++) {
MappingTask existing = tasks.get(i);
if (existing instanceof UpdateTask) {
UpdateTask eTask = (UpdateTask) existing;
if (eTask.type.equals(uTask.type)) {
// if we have the order, and the node id, then we can compare, and replace if applicable
if (eTask.order != -1 && eTask.nodeId != null) {
if (eTask.nodeId.equals(uTask.nodeId) && uTask.order > eTask.order) {
// a newer update task, we can replace so we execute it one!
tasks.set(i, uTask);
add = false;
break;
}
}
}
}
}
}
}
if (add) {
tasks.add(task);
}
tasks.add(task);
}
// construct the actual index if needed, and make sure the relevant mappings are there
@@ -365,13 +331,13 @@
});
}
public void updateMapping(final String index, final String indexUUID, final String type, final CompressedString mappingSource, final long order, final String nodeId, final ActionListener<ClusterStateUpdateResponse> listener) {
public void updateMapping(final String index, final String indexUUID, final String type, final CompressedString mappingSource, final String nodeId, final ActionListener<ClusterStateUpdateResponse> listener) {
final long insertOrder;
synchronized (refreshOrUpdateMutex) {
insertOrder = ++refreshOrUpdateInsertOrder;
refreshOrUpdateQueue.add(new UpdateTask(index, indexUUID, type, mappingSource, order, nodeId, listener));
refreshOrUpdateQueue.add(new UpdateTask(index, indexUUID, type, mappingSource, nodeId, listener));
}
clusterService.submitStateUpdateTask("update-mapping [" + index + "][" + type + "] / node [" + nodeId + "], order [" + order + "]", Priority.HIGH, new ProcessedClusterStateUpdateTask() {
clusterService.submitStateUpdateTask("update-mapping [" + index + "][" + type + "] / node [" + nodeId + "]", Priority.HIGH, new ProcessedClusterStateUpdateTask() {
private volatile List<MappingTask> allTasks;
@Override
@@ -398,7 +364,7 @@
try {
uTask.listener.onResponse(response);
} catch (Throwable t) {
logger.debug("failed ot ping back on response of mapping processing for task [{}]", t, uTask.listener);
logger.debug("failed to ping back on response of mapping processing for task [{}]", t, uTask.listener);
}
}
}
@@ -457,7 +423,7 @@
newMapper = indexService.mapperService().parse(request.type(), new CompressedString(request.source()), existingMapper == null);
if (existingMapper != null) {
// first, simulate
DocumentMapper.MergeResult mergeResult = existingMapper.merge(newMapper, mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = existingMapper.merge(newMapper.mapping(), mergeFlags().simulate(true));
// if we have conflicts, and we are not supposed to ignore them, throw an exception
if (!request.ignoreConflicts() && mergeResult.hasConflicts()) {
throw new MergeMappingException(mergeResult.conflicts());


@@ -68,7 +68,7 @@ public class ClusterDynamicSettingsModule extends AbstractModule {
clusterDynamicSettings.addDynamicSetting(IndicesStore.INDICES_STORE_THROTTLE_TYPE);
clusterDynamicSettings.addDynamicSetting(IndicesStore.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, Validator.BYTES_SIZE);
clusterDynamicSettings.addDynamicSetting(IndicesTTLService.INDICES_TTL_INTERVAL, Validator.TIME);
clusterDynamicSettings.addDynamicSetting(MappingUpdatedAction.INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME, Validator.TIME);
clusterDynamicSettings.addDynamicSetting(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, Validator.TIME);
clusterDynamicSettings.addDynamicSetting(MetaData.SETTING_READ_ONLY);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, Validator.BYTES_SIZE);
clusterDynamicSettings.addDynamicSetting(RecoverySettings.INDICES_RECOVERY_TRANSLOG_OPS, Validator.INTEGER);


@@ -33,6 +33,7 @@ import org.elasticsearch.common.util.CancellableThreads;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShard;
@@ -44,10 +45,11 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Set;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
*
@@ -61,7 +63,7 @@ public class IndexShardGateway extends AbstractIndexShardComponent implements Cl
private final TimeValue waitForMappingUpdatePostRecovery;
private final TimeValue syncInterval;
private volatile ScheduledFuture flushScheduler;
private volatile ScheduledFuture<?> flushScheduler;
private final CancellableThreads cancellableThreads = new CancellableThreads();
@@ -74,7 +76,7 @@ public class IndexShardGateway extends AbstractIndexShardComponent implements Cl
this.indexService = indexService;
this.indexShard = indexShard;
this.waitForMappingUpdatePostRecovery = indexSettings.getAsTime("index.gateway.wait_for_mapping_update_post_recovery", TimeValue.timeValueSeconds(30));
this.waitForMappingUpdatePostRecovery = indexSettings.getAsTime("index.gateway.wait_for_mapping_update_post_recovery", TimeValue.timeValueMinutes(15));
syncInterval = indexSettings.getAsTime("index.gateway.sync", TimeValue.timeValueSeconds(5));
if (syncInterval.millis() > 0) {
this.indexShard.translog().syncOnEachOperation(false);
@@ -93,7 +95,7 @@ public class IndexShardGateway extends AbstractIndexShardComponent implements Cl
public void recover(boolean indexShouldExists, RecoveryState recoveryState) throws IndexShardGatewayRecoveryException {
indexShard.prepareForIndexRecovery();
long version = -1;
final Set<String> typesToUpdate;
final Map<String, Mapping> typesToUpdate;
SegmentInfos si = null;
indexShard.store().incRef();
try {
@@ -149,41 +151,49 @@
typesToUpdate = indexShard.performTranslogRecovery();
indexShard.finalizeRecovery();
for (Map.Entry<String, Mapping> entry : typesToUpdate.entrySet()) {
validateMappingUpdate(entry.getKey(), entry.getValue());
}
indexShard.postRecovery("post recovery from gateway");
} catch (EngineException e) {
throw new IndexShardGatewayRecoveryException(shardId, "failed to recovery from gateway", e);
} finally {
indexShard.store().decRef();
}
for (final String type : typesToUpdate) {
final CountDownLatch latch = new CountDownLatch(1);
mappingUpdatedAction.updateMappingOnMaster(indexService.index().name(), indexService.mapperService().documentMapper(type), indexService.indexUUID(), new MappingUpdatedAction.MappingUpdateListener() {
@Override
public void onMappingUpdate() {
latch.countDown();
}
}
@Override
public void onFailure(Throwable t) {
latch.countDown();
logger.debug("failed to send mapping update post recovery to master for [{}]", t, type);
}
});
cancellableThreads.execute(new CancellableThreads.Interruptable() {
@Override
public void run() throws InterruptedException {
try {
if (latch.await(waitForMappingUpdatePostRecovery.millis(), TimeUnit.MILLISECONDS) == false) {
logger.debug("waited for mapping update on master for [{}], yet timed out", type);
private void validateMappingUpdate(final String type, Mapping update) {
final CountDownLatch latch = new CountDownLatch(1);
final AtomicReference<Throwable> error = new AtomicReference<>();
mappingUpdatedAction.updateMappingOnMaster(indexService.index().name(), indexService.indexUUID(), type, update, new MappingUpdatedAction.MappingUpdateListener() {
@Override
public void onMappingUpdate() {
latch.countDown();
}
@Override
public void onFailure(Throwable t) {
latch.countDown();
error.set(t);
}
});
cancellableThreads.execute(new CancellableThreads.Interruptable() {
@Override
public void run() throws InterruptedException {
try {
if (latch.await(waitForMappingUpdatePostRecovery.millis(), TimeUnit.MILLISECONDS) == false) {
logger.debug("waited for mapping update on master for [{}], yet timed out", type);
} else {
if (error.get() != null) {
throw new IndexShardGatewayRecoveryException(shardId, "Failed to propagate mappings on master post recovery", error.get());
}
} catch (InterruptedException e) {
logger.debug("interrupted while waiting for mapping update");
throw e;
}
} catch (InterruptedException e) {
logger.debug("interrupted while waiting for mapping update");
throw e;
}
});
}
}
});
}
@Override


@@ -50,6 +50,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.mapper.Mapping.SourceTransform;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
@@ -72,7 +73,6 @@ import org.elasticsearch.script.ScriptService.ScriptType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -82,8 +82,6 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import static com.google.common.collect.Lists.newArrayList;
/**
*
*/
@@ -165,7 +163,7 @@ public class DocumentMapper implements ToXContent {
private Map<Class<? extends RootMapper>, RootMapper> rootMappers = new LinkedHashMap<>();
private List<SourceTransform> sourceTransforms;
private List<SourceTransform> sourceTransforms = new ArrayList<>(1);
private final String index;
@@ -213,9 +211,6 @@ }
}
public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language, Map<String, Object> parameters) {
if (sourceTransforms == null) {
sourceTransforms = new ArrayList<>();
}
sourceTransforms.add(new ScriptTransform(scriptService, script, scriptType, language, parameters));
return this;
}
@@ -243,15 +238,9 @@
private final DocumentMapperParser docMapperParser;
private volatile ImmutableMap<String, Object> meta;
private volatile CompressedString mappingSource;
private final RootObjectMapper rootObjectMapper;
private final ImmutableMap<Class<? extends RootMapper>, RootMapper> rootMappers;
private final RootMapper[] rootMappersOrdered;
private final RootMapper[] rootMappersNotIncludedInObject;
private final Mapping mapping;
private volatile DocumentFieldMappers fieldMappers;
@@ -267,8 +256,6 @@
private final Object mappersMutex = new Object();
private final List<SourceTransform> sourceTransforms;
public DocumentMapper(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser,
RootObjectMapper rootObjectMapper,
ImmutableMap<String, Object> meta,
@@ -278,19 +265,11 @@
this.type = rootObjectMapper.name();
this.typeText = new StringAndBytesText(this.type);
this.docMapperParser = docMapperParser;
this.meta = meta;
this.rootObjectMapper = rootObjectMapper;
this.sourceTransforms = sourceTransforms;
this.rootMappers = ImmutableMap.copyOf(rootMappers);
this.rootMappersOrdered = rootMappers.values().toArray(new RootMapper[rootMappers.values().size()]);
List<RootMapper> rootMappersNotIncludedInObjectLst = newArrayList();
for (RootMapper rootMapper : rootMappersOrdered) {
if (!rootMapper.includeInObject()) {
rootMappersNotIncludedInObjectLst.add(rootMapper);
}
}
this.rootMappersNotIncludedInObject = rootMappersNotIncludedInObjectLst.toArray(new RootMapper[rootMappersNotIncludedInObjectLst.size()]);
this.mapping = new Mapping(
rootObjectMapper,
rootMappers.values().toArray(new RootMapper[rootMappers.values().size()]),
sourceTransforms.toArray(new SourceTransform[sourceTransforms.size()]),
meta);
this.typeFilter = typeMapper().termFilter(type, null);
@ -300,13 +279,9 @@ public class DocumentMapper implements ToXContent {
}
FieldMapperListener.Aggregator fieldMappersAgg = new FieldMapperListener.Aggregator();
for (RootMapper rootMapper : rootMappersOrdered) {
if (rootMapper.includeInObject()) {
rootObjectMapper.putMapper(rootMapper);
} else {
if (rootMapper instanceof FieldMapper) {
fieldMappersAgg.mappers.add((FieldMapper) rootMapper);
}
for (RootMapper rootMapper : this.mapping.rootMappers) {
if (rootMapper instanceof FieldMapper) {
fieldMappersAgg.mappers.add((FieldMapper) rootMapper);
}
}
@ -332,6 +307,10 @@ public class DocumentMapper implements ToXContent {
refreshSource();
}
public Mapping mapping() {
return mapping;
}
public String type() {
return this.type;
}
@ -341,7 +320,7 @@ public class DocumentMapper implements ToXContent {
}
public ImmutableMap<String, Object> meta() {
return this.meta;
return mapping.meta;
}
public CompressedString mappingSource() {
@ -349,7 +328,7 @@ public class DocumentMapper implements ToXContent {
}
public RootObjectMapper root() {
return this.rootObjectMapper;
return mapping.root;
}
public UidFieldMapper uidMapper() {
@ -358,7 +337,7 @@ public class DocumentMapper implements ToXContent {
@SuppressWarnings({"unchecked"})
public <T extends RootMapper> T rootMapper(Class<T> type) {
return (T) rootMappers.get(type);
return mapping.rootMapper(type);
}
public IndexFieldMapper indexMapper() {
@ -445,13 +424,12 @@ public class DocumentMapper implements ToXContent {
}
source.type(this.type);
boolean mappingsModified = false;
XContentParser parser = source.parser();
try {
if (parser == null) {
parser = XContentHelper.createParser(source.source());
}
if (sourceTransforms != null) {
if (mapping.sourceTransforms.length > 0) {
parser = transform(parser);
}
context.reset(parser, new ParseContext.Document(), source, listener);
@ -471,35 +449,14 @@ public class DocumentMapper implements ToXContent {
throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
}
for (RootMapper rootMapper : rootMappersOrdered) {
for (RootMapper rootMapper : mapping.rootMappers) {
rootMapper.preParse(context);
}
if (!emptyDoc) {
Mapper update = rootObjectMapper.parse(context);
for (RootObjectMapper mapper : context.updates()) {
if (update == null) {
update = mapper;
} else {
MapperUtils.merge(update, mapper);
}
}
Mapper update = mapping.root.parse(context);
if (update != null) {
// TODO: validate the mapping update on the master node
// lock to avoid concurrency issues with mapping updates coming from the API
synchronized(this) {
// simulate on the first time to check if the mapping update is applicable
MergeContext mergeContext = newMergeContext(new MergeFlags().simulate(true));
rootObjectMapper.merge(update, mergeContext);
if (mergeContext.hasConflicts()) {
throw new MapperParsingException("Could not apply generated dynamic mappings: " + Arrays.toString(mergeContext.buildConflicts()));
} else {
// then apply it for real
mappingsModified = true;
mergeContext = newMergeContext(new MergeFlags().simulate(false));
rootObjectMapper.merge(update, mergeContext);
}
}
context.addDynamicMappingsUpdate((RootObjectMapper) update);
}
}
@ -507,7 +464,7 @@ public class DocumentMapper implements ToXContent {
parser.nextToken();
}
for (RootMapper rootMapper : rootMappersOrdered) {
for (RootMapper rootMapper : mapping.rootMappers) {
rootMapper.postParse(context);
}
} catch (Throwable e) {
@ -548,8 +505,14 @@ public class DocumentMapper implements ToXContent {
}
}
Mapper rootDynamicUpdate = context.dynamicMappingsUpdate();
Mapping update = null;
if (rootDynamicUpdate != null) {
update = mapping.mappingUpdate(rootDynamicUpdate);
}
ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(),
context.source(), mappingsModified).parent(source.parent());
context.source(), update).parent(source.parent());
// reset the context to free up memory
context.reset(null, null, null, null);
return doc;
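The net effect of the rewritten parse path: a ParsedDocument now carries any dynamic mapping update as data rather than a mutable mappingsModified flag. A minimal sketch of the consuming side, assuming only the accessors introduced in this diff:

// docMapper and source stand in for a DocumentMapper and a SourceToParse
ParsedDocument doc = docMapper.parse(source);
Mapping update = doc.dynamicMappingsUpdate();
if (update != null) {
    // parsing introduced new fields; the update must be validated and
    // applied on the master before the indexing operation completes
}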
@ -600,10 +563,10 @@ public class DocumentMapper implements ToXContent {
* @return the transformed source. This may actually be the same object as sourceAsMap
*/
public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
if (sourceTransforms == null) {
if (mapping.sourceTransforms.length == 0) {
return sourceAsMap;
}
for (SourceTransform transform : sourceTransforms) {
for (SourceTransform transform : mapping.sourceTransforms) {
sourceAsMap = transform.transformSourceAsMap(sourceAsMap);
}
return sourceAsMap;
@ -629,12 +592,12 @@ public class DocumentMapper implements ToXContent {
}
public void traverse(FieldMapperListener listener) {
for (RootMapper rootMapper : rootMappersOrdered) {
for (RootMapper rootMapper : mapping.rootMappers) {
if (!rootMapper.includeInObject() && rootMapper instanceof FieldMapper) {
listener.fieldMapper((FieldMapper) rootMapper);
}
}
rootObjectMapper.traverse(listener);
mapping.root.traverse(listener);
}
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
@ -662,7 +625,7 @@ public class DocumentMapper implements ToXContent {
}
public void traverse(ObjectMapperListener listener) {
rootObjectMapper.traverse(listener);
mapping.root.traverse(listener);
}
private MergeContext newMergeContext(MergeFlags mergeFlags) {
@ -672,11 +635,13 @@ public class DocumentMapper implements ToXContent {
@Override
public void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
assert mergeFlags().simulate() == false;
DocumentMapper.this.addFieldMappers(fieldMappers);
}
@Override
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
assert mergeFlags().simulate() == false;
DocumentMapper.this.addObjectMappers(objectMappers);
}
@ -698,29 +663,13 @@ public class DocumentMapper implements ToXContent {
};
}
public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
public synchronized MergeResult merge(Mapping mapping, MergeFlags mergeFlags) {
final MergeContext mergeContext = newMergeContext(mergeFlags);
assert rootMappers.size() == mergeWith.rootMappers.size();
rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);
for (Map.Entry<Class<? extends RootMapper>, RootMapper> entry : rootMappers.entrySet()) {
// root mappers included in root object will get merge in the rootObjectMapper
if (entry.getValue().includeInObject()) {
continue;
}
RootMapper mergeWithRootMapper = mergeWith.rootMappers.get(entry.getKey());
if (mergeWithRootMapper != null) {
entry.getValue().merge(mergeWithRootMapper, mergeContext);
}
}
if (!mergeFlags.simulate()) {
// let the merge with attributes to override the attributes
meta = mergeWith.meta();
// update the source of the merged one
final MergeResult mergeResult = this.mapping.merge(mapping, mergeContext);
if (mergeFlags.simulate() == false) {
refreshSource();
}
return new MergeResult(mergeContext.buildConflicts());
return mergeResult;
}
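The slimmed-down merge keeps the simulate-first contract used elsewhere in this commit: a simulated merge only collects conflicts, while a real merge also refreshes the serialized source. A hedged sketch of the calling pattern (MergeMappingException as the failure type is an assumption):

DocumentMapper.MergeResult result = existing.merge(newMapper.mapping(), mergeFlags().simulate(true));
if (result.hasConflicts()) {
    // nothing was applied; report the conflicts to the caller
    throw new MergeMappingException(result.buildConflicts());
}
existing.merge(newMapper.mapping(), mergeFlags().simulate(false)); // apply for real, refreshes source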
public CompressedString refreshSource() throws ElasticsearchGenerationException {
@ -739,51 +688,15 @@ public class DocumentMapper implements ToXContent {
public void close() {
cache.close();
rootObjectMapper.close();
for (RootMapper rootMapper : rootMappersOrdered) {
mapping.root.close();
for (RootMapper rootMapper : mapping.rootMappers) {
rootMapper.close();
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
rootObjectMapper.toXContent(builder, params, new ToXContent() {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (sourceTransforms != null) {
if (sourceTransforms.size() == 1) {
builder.field("transform");
sourceTransforms.get(0).toXContent(builder, params);
} else {
builder.startArray("transform");
for (SourceTransform transform: sourceTransforms) {
transform.toXContent(builder, params);
}
builder.endArray();
}
}
if (meta != null && !meta.isEmpty()) {
builder.field("_meta", meta());
}
return builder;
}
// no need to pass here id and boost, since they are added to the root object mapper
// in the constructor
}, rootMappersNotIncludedInObject);
return builder;
}
/**
* Transformations to be applied to the source before indexing and/or after loading.
*/
private interface SourceTransform extends ToXContent {
/**
* Transform the source when it is expressed as a map. This is public so it can be transformed the source is loaded.
* @param sourceAsMap source to transform. This may be mutated by the script.
* @return transformed version of transformMe. This may actually be the same object as sourceAsMap
*/
Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap);
return mapping.toXContent(builder, params);
}
/**

View File

@ -339,7 +339,7 @@ public class MapperService extends AbstractIndexComponent {
DocumentMapper oldMapper = mappers.get(mapper.type());
if (oldMapper != null) {
DocumentMapper.MergeResult result = oldMapper.merge(mapper, mergeFlags().simulate(false));
DocumentMapper.MergeResult result = oldMapper.merge(mapper.mapping(), mergeFlags().simulate(false));
if (result.hasConflicts()) {
// TODO: What should we do???
if (logger.isDebugEnabled()) {
@ -417,26 +417,19 @@ public class MapperService extends AbstractIndexComponent {
}
/**
* Returns the document mapper created, including if the document mapper ended up
* being actually created or not in the second tuple value.
* Returns the document mapper created, including a mapping update if the
* type has been dynamically created.
*/
public Tuple<DocumentMapper, Boolean> documentMapperWithAutoCreate(String type) {
public Tuple<DocumentMapper, Mapping> documentMapperWithAutoCreate(String type) {
DocumentMapper mapper = mappers.get(type);
if (mapper != null) {
return Tuple.tuple(mapper, Boolean.FALSE);
return Tuple.tuple(mapper, null);
}
if (!dynamic) {
throw new TypeMissingException(index, type, "trying to auto create mapping, but dynamic mapping is disabled");
}
// go ahead and dynamically create it
synchronized (typeMutex) {
mapper = mappers.get(type);
if (mapper != null) {
return Tuple.tuple(mapper, Boolean.FALSE);
}
merge(type, null, true);
return Tuple.tuple(mappers.get(type), Boolean.TRUE);
}
mapper = parse(type, null, true);
return Tuple.tuple(mapper, mapper.mapping());
}
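Callers now receive the pending mapping of a dynamically created type instead of a boolean, and are responsible for attaching it to the parsed document, as the shard-level changes later in this commit do:

Tuple<DocumentMapper, Mapping> docMapper = mapperService.documentMapperWithAutoCreate(type);
ParsedDocument doc = docMapper.v1().parse(source);
if (docMapper.v2() != null) {
    // the type itself was just created: ship its whole mapping along with
    // any field-level updates that parsing produced
    doc.addDynamicMappingsUpdate(docMapper.v2());
}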
/**

View File

@ -19,10 +19,8 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.IOException;
import java.util.Collection;
@ -44,12 +42,8 @@ public enum MapperUtils {
return mapper;
}
/**
* Merge {@code mergeWith} into {@code mergeTo}. Note: this method only
* merges mappings, not lookup structures. Conflicts are returned as exceptions.
*/
public static void merge(Mapper mergeInto, Mapper mergeWith) {
MergeContext ctx = new MergeContext(new DocumentMapper.MergeFlags().simulate(false)) {
private static MergeContext newStrictMergeContext() {
return new MergeContext(new DocumentMapper.MergeFlags().simulate(false)) {
@Override
public boolean hasConflicts() {
@ -73,10 +67,25 @@ public enum MapperUtils {
@Override
public void addConflict(String mergeFailure) {
throw new ElasticsearchIllegalStateException("Merging dynamic updates triggered a conflict: " + mergeFailure);
throw new MapperParsingException("Merging dynamic updates triggered a conflict: " + mergeFailure);
}
};
mergeInto.merge(mergeWith, ctx);
}
/**
* Merge {@code mergeWith} into {@code mergeInto}. Note: this method only
* merges mappings, not lookup structures. Conflicts are thrown as exceptions.
*/
public static void merge(Mapper mergeInto, Mapper mergeWith) {
mergeInto.merge(mergeWith, newStrictMergeContext());
}
/**
* Merge {@code mergeWith} into {@code mergeInto}. Note: this method only
* merges mappings, not lookup structures. Conflicts are thrown as exceptions.
*/
public static void merge(Mapping mergeInto, Mapping mergeWith) {
mergeInto.merge(mergeWith, newStrictMergeContext());
}
}
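With the strict context, combining two dynamic updates is all-or-nothing: the first conflict aborts with a MapperParsingException instead of being recorded silently. In outline (pendingUpdate and newUpdate are hypothetical Mapping instances gathered from parsing):

// throws MapperParsingException on conflict, e.g. when the same dynamic
// field was resolved to two incompatible types
MapperUtils.merge(pendingUpdate, newUpdate);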

View File

@ -0,0 +1,171 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper.MergeResult;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Wrapper around everything that defines a mapping, without references to
* utility classes like MapperService, ...
*/
public final class Mapping implements ToXContent {
/**
* Transformations to be applied to the source before indexing and/or after loading.
*/
public interface SourceTransform extends ToXContent {
/**
* Transform the source when it is expressed as a map. This is public so it can be applied when the source is loaded.
* @param sourceAsMap source to transform. This may be mutated by the script.
* @return the transformed source. This may actually be the same object as sourceAsMap
*/
Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap);
}
final RootObjectMapper root;
final RootMapper[] rootMappers;
final RootMapper[] rootMappersNotIncludedInObject;
final ImmutableMap<Class<? extends RootMapper>, RootMapper> rootMappersMap;
final SourceTransform[] sourceTransforms;
volatile ImmutableMap<String, Object> meta;
public Mapping(RootObjectMapper rootObjectMapper, RootMapper[] rootMappers, SourceTransform[] sourceTransforms, ImmutableMap<String, Object> meta) {
this.root = rootObjectMapper;
this.rootMappers = rootMappers;
List<RootMapper> rootMappersNotIncludedInObject = new ArrayList<>();
ImmutableMap.Builder<Class<? extends RootMapper>, RootMapper> builder = ImmutableMap.builder();
for (RootMapper rootMapper : rootMappers) {
if (rootMapper.includeInObject()) {
root.putMapper(rootMapper);
} else {
rootMappersNotIncludedInObject.add(rootMapper);
}
builder.put(rootMapper.getClass(), rootMapper);
}
this.rootMappersNotIncludedInObject = rootMappersNotIncludedInObject.toArray(new RootMapper[rootMappersNotIncludedInObject.size()]);
this.rootMappersMap = builder.build();
this.sourceTransforms = sourceTransforms;
this.meta = meta;
}
/** Return the root object mapper. */
public RootObjectMapper root() {
return root;
}
/**
* Generate a mapping update for the given root object mapper.
*/
public Mapping mappingUpdate(Mapper rootObjectMapper) {
return new Mapping((RootObjectMapper) rootObjectMapper, rootMappers, sourceTransforms, meta);
}
/** Get the root mapper with the given class. */
@SuppressWarnings("unchecked")
public <T extends RootMapper> T rootMapper(Class<T> clazz) {
return (T) rootMappersMap.get(clazz);
}
/** @see DocumentMapper#merge(Mapping, org.elasticsearch.index.mapper.DocumentMapper.MergeFlags) */
public MergeResult merge(Mapping mergeWith, MergeContext mergeContext) {
assert rootMappers.length == mergeWith.rootMappers.length;
root.merge(mergeWith.root, mergeContext);
for (RootMapper rootMapper : rootMappers) {
// root mappers included in the root object will get merged in the rootObjectMapper
if (rootMapper.includeInObject()) {
continue;
}
RootMapper mergeWithRootMapper = mergeWith.rootMapper(rootMapper.getClass());
if (mergeWithRootMapper != null) {
rootMapper.merge(mergeWithRootMapper, mergeContext);
}
}
if (mergeContext.mergeFlags().simulate() == false) {
// let mergeWith's attributes override the current attributes
meta = mergeWith.meta;
}
return new MergeResult(mergeContext.buildConflicts());
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
root.toXContent(builder, params, new ToXContent() {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (sourceTransforms.length > 0) {
if (sourceTransforms.length == 1) {
builder.field("transform");
sourceTransforms[0].toXContent(builder, params);
} else {
builder.startArray("transform");
for (SourceTransform transform: sourceTransforms) {
transform.toXContent(builder, params);
}
builder.endArray();
}
}
if (meta != null && !meta.isEmpty()) {
builder.field("_meta", meta);
}
return builder;
}
// no need to pass here id and boost, since they are added to the root object mapper
// in the constructor
}, rootMappersNotIncludedInObject);
return builder;
}
/** Serialize to a {@link BytesReference}. */
public BytesReference toBytes() {
try {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
toXContent(builder, new ToXContent.MapParams(ImmutableMap.<String, String>of()));
return builder.endObject().bytes();
} catch (IOException bogus) {
throw new AssertionError(bogus);
}
}
@Override
public String toString() {
try {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
toXContent(builder, new ToXContent.MapParams(ImmutableMap.<String, String>of()));
return builder.endObject().string();
} catch (IOException bogus) {
throw new AssertionError(bogus);
}
}
}
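Because Mapping holds no back-references to MapperService, a dynamic update can be built and serialized in isolation. A minimal sketch using only the methods above (rootUpdate is a hypothetical RootObjectMapper produced while parsing one document):

Mapping update = current.mappingUpdate(rootUpdate);
BytesReference mappingJson = update.toBytes(); // JSON bytes, ready to ship to the master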

View File

@ -359,13 +359,13 @@ public abstract class ParseContext {
}
@Override
public void addRootObjectUpdate(RootObjectMapper update) {
in.addRootObjectUpdate(update);
public void addDynamicMappingsUpdate(Mapper update) {
in.addDynamicMappingsUpdate(update);
}
@Override
public List<RootObjectMapper> updates() {
return in.updates();
public Mapper dynamicMappingsUpdate() {
return in.dynamicMappingsUpdate();
}
}
@ -401,13 +401,11 @@ public abstract class ParseContext {
private Map<String, String> ignoredValues = new HashMap<>();
private boolean mappingsModified = false;
private AllEntries allEntries = new AllEntries();
private float docBoost = 1.0f;
private final List<RootObjectMapper> rootMapperDynamicUpdates = new ArrayList<>();
private Mapper dynamicMappingsUpdate = null;
public InternalParseContext(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
this.index = index;
@ -432,12 +430,11 @@ public abstract class ParseContext {
this.sourceToParse = source;
this.source = source == null ? null : sourceToParse.source();
this.path.reset();
this.mappingsModified = false;
this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener;
this.allEntries = new AllEntries();
this.ignoredValues.clear();
this.docBoost = 1.0f;
this.rootMapperDynamicUpdates.clear();
this.dynamicMappingsUpdate = null;
}
@Override
@ -604,13 +601,18 @@ public abstract class ParseContext {
}
@Override
public void addRootObjectUpdate(RootObjectMapper mapper) {
rootMapperDynamicUpdates.add(mapper);
public void addDynamicMappingsUpdate(Mapper mapper) {
assert mapper instanceof RootObjectMapper : mapper;
if (dynamicMappingsUpdate == null) {
dynamicMappingsUpdate = mapper;
} else {
MapperUtils.merge(dynamicMappingsUpdate, mapper);
}
}
@Override
public List<RootObjectMapper> updates() {
return rootMapperDynamicUpdates;
public Mapper dynamicMappingsUpdate() {
return dynamicMappingsUpdate;
}
}
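The context therefore holds at most one pending update per document, folding later updates into the first as parsing proceeds. Conceptually (updateA and updateB are hypothetical root-level updates):

context.addDynamicMappingsUpdate(updateA); // first update: stored as-is
context.addDynamicMappingsUpdate(updateB); // merged into updateA; conflicts throw
Mapper combined = context.dynamicMappingsUpdate(); // the single merged update, or null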
@ -820,13 +822,11 @@ public abstract class ParseContext {
/**
* Add a dynamic update to the root object mapper.
* TODO: can we nuke it, it is only needed for copy_to
*/
public abstract void addRootObjectUpdate(RootObjectMapper update);
public abstract void addDynamicMappingsUpdate(Mapper update);
/**
* Get dynamic updates to the root object mapper.
* TODO: can we nuke it, it is only needed for copy_to
*/
public abstract List<RootObjectMapper> updates();
public abstract Mapper dynamicMappingsUpdate();
}

View File

@ -19,10 +19,8 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.mapper.ParseContext.Document;
import java.util.List;
@ -48,11 +46,11 @@ public class ParsedDocument {
private BytesReference source;
private boolean mappingsModified;
private Mapping dynamicMappingsUpdate;
private String parent;
public ParsedDocument(Field uid, Field version, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, BytesReference source, boolean mappingsModified) {
public ParsedDocument(Field uid, Field version, String id, String type, String routing, long timestamp, long ttl, List<Document> documents, BytesReference source, Mapping dynamicMappingsUpdate) {
this.uid = uid;
this.version = version;
this.id = id;
@ -62,7 +60,7 @@ public class ParsedDocument {
this.ttl = ttl;
this.documents = documents;
this.source = source;
this.mappingsModified = mappingsModified;
this.dynamicMappingsUpdate = dynamicMappingsUpdate;
}
public Field uid() {
@ -119,28 +117,19 @@ public class ParsedDocument {
}
/**
* Has the parsed document caused mappings to be modified?
* Return dynamic updates to mappings or {@code null} if there were no
* updates to the mappings.
*/
public boolean mappingsModified() {
return mappingsModified;
public Mapping dynamicMappingsUpdate() {
return dynamicMappingsUpdate;
}
/**
* latches the mapping to be marked as modified.
*/
public void setMappingsModified() {
this.mappingsModified = true;
}
/**
* Uses the value of get document or create to automatically set if mapping is
* modified or not.
*/
public ParsedDocument setMappingsModified(Tuple<DocumentMapper, Boolean> docMapper) {
if (docMapper.v2()) {
setMappingsModified();
public void addDynamicMappingsUpdate(Mapping update) {
if (dynamicMappingsUpdate == null) {
dynamicMappingsUpdate = update;
} else {
MapperUtils.merge(dynamicMappingsUpdate, update);
}
return this;
}
@Override

View File

@ -1112,7 +1112,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
update = parent.mappingUpdate(update);
objectPath = parentPath;
}
context.addRootObjectUpdate((RootObjectMapper) update);
context.addDynamicMappingsUpdate((RootObjectMapper) update);
}
}
}

View File

@ -366,7 +366,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
if (!Objects.equal(type, other.type)) {
mergeContext.addConflict("The _parent field's type option can't be changed");
mergeContext.addConflict("The _parent field's type option can't be changed: [" + type + "]->[" + other.type + "]");
}
if (!mergeContext.mergeFlags().simulate()) {

View File

@ -502,6 +502,10 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll, Clonea
return this.dynamic == null ? Dynamic.TRUE : this.dynamic;
}
public void setDynamic(Dynamic dynamic) {
this.dynamic = dynamic;
}
protected boolean allowValue() {
return true;
}
@ -1045,13 +1049,16 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll, Clonea
}
}
if (!mappers.isEmpty()) {
builder.startObject("properties");
for (Mapper mapper : sortedMappers) {
if (!(mapper instanceof InternalMapper)) {
mapper.toXContent(builder, params);
int count = 0;
for (Mapper mapper : sortedMappers) {
if (!(mapper instanceof InternalMapper)) {
if (count++ == 0) {
builder.startObject("properties");
}
mapper.toXContent(builder, params);
}
}
if (count > 0) {
builder.endObject();
}
builder.endObject();

View File

@ -33,7 +33,6 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.action.WriteFailureException;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest;
import org.elasticsearch.cluster.ClusterService;
@ -450,18 +449,13 @@ public class IndexShard extends AbstractIndexShardComponent {
return prepareCreate(docMapper(source.type()), source, version, versionType, origin, state != IndexShardState.STARTED || canHaveDuplicates, autoGeneratedId);
}
static Engine.Create prepareCreate(Tuple<DocumentMapper, Boolean> docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, boolean canHaveDuplicates, boolean autoGeneratedId) throws ElasticsearchException {
static Engine.Create prepareCreate(Tuple<DocumentMapper, Mapping> docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, boolean canHaveDuplicates, boolean autoGeneratedId) throws ElasticsearchException {
long startTime = System.nanoTime();
try {
ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
return new Engine.Create(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates, autoGeneratedId);
} catch (Throwable t) {
if (docMapper.v2()) {
throw new WriteFailureException(t, docMapper.v1().type());
} else {
throw t;
}
ParsedDocument doc = docMapper.v1().parse(source);
if (docMapper.v2() != null) {
doc.addDynamicMappingsUpdate(docMapper.v2());
}
return new Engine.Create(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates, autoGeneratedId);
}
public ParsedDocument create(Engine.Create create) throws ElasticsearchException {
@ -486,18 +480,13 @@ public class IndexShard extends AbstractIndexShardComponent {
return prepareIndex(docMapper(source.type()), source, version, versionType, origin, state != IndexShardState.STARTED || canHaveDuplicates);
}
static Engine.Index prepareIndex(Tuple<DocumentMapper, Boolean> docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, boolean canHaveDuplicates) throws ElasticsearchException {
static Engine.Index prepareIndex(Tuple<DocumentMapper, Mapping> docMapper, SourceToParse source, long version, VersionType versionType, Engine.Operation.Origin origin, boolean canHaveDuplicates) throws ElasticsearchException {
long startTime = System.nanoTime();
try {
ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
return new Engine.Index(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates);
} catch (Throwable t) {
if (docMapper.v2()) {
throw new WriteFailureException(t, docMapper.v1().type());
} else {
throw t;
}
ParsedDocument doc = docMapper.v1().parse(source);
if (docMapper.v2() != null) {
doc.addDynamicMappingsUpdate(docMapper.v2());
}
return new Engine.Index(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates);
}
public ParsedDocument index(Engine.Index index) throws ElasticsearchException {
@ -800,14 +789,14 @@ public class IndexShard extends AbstractIndexShardComponent {
/**
* After the store has been recovered, we need to start the engine in order to apply operations
*/
public Set<String> performTranslogRecovery() throws ElasticsearchException {
final Set<String> recoveredTypes = internalPerformTranslogRecovery(false);
public Map<String, Mapping> performTranslogRecovery() throws ElasticsearchException {
final Map<String, Mapping> recoveredTypes = internalPerformTranslogRecovery(false);
assert recoveryState.getStage() == RecoveryState.Stage.TRANSLOG : "TRANSLOG stage expected but was: " + recoveryState.getStage();
return recoveredTypes;
}
private Set<String> internalPerformTranslogRecovery(boolean skipTranslogRecovery) throws ElasticsearchException {
private Map<String, Mapping> internalPerformTranslogRecovery(boolean skipTranslogRecovery) throws ElasticsearchException {
if (state != IndexShardState.RECOVERING) {
throw new IndexShardNotRecoveringException(shardId, state);
}
@ -832,7 +821,7 @@ public class IndexShard extends AbstractIndexShardComponent {
*/
public void skipTranslogRecovery() throws ElasticsearchException {
assert engineUnsafe() == null : "engine was already created";
Set<String> recoveredTypes = internalPerformTranslogRecovery(true);
Map<String, Mapping> recoveredTypes = internalPerformTranslogRecovery(true);
assert recoveredTypes.isEmpty();
assert recoveryState.getTranslog().recoveredOperations() == 0;
}
@ -1277,7 +1266,7 @@ public class IndexShard extends AbstractIndexShardComponent {
return indexSettings.get(IndexMetaData.SETTING_UUID, IndexMetaData.INDEX_UUID_NA_VALUE);
}
private Tuple<DocumentMapper, Boolean> docMapper(String type) {
private Tuple<DocumentMapper, Mapping> docMapper(String type) {
return mapperService.documentMapperWithAutoCreate(type);
}

View File

@ -26,14 +26,17 @@ import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperAnalyzer;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.translog.Translog;
import java.util.HashSet;
import java.util.Set;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.index.mapper.SourceToParse.source;
@ -47,7 +50,7 @@ public class TranslogRecoveryPerformer {
private final IndexAliasesService indexAliasesService;
private final IndexCache indexCache;
private final MapperAnalyzer mapperAnalyzer;
private final Set<String> recoveredTypes = new HashSet<>();
private final Map<String, Mapping> recoveredTypes = new HashMap<>();
protected TranslogRecoveryPerformer(MapperService mapperService, MapperAnalyzer mapperAnalyzer, IndexQueryParserService queryParserService, IndexAliasesService indexAliasesService, IndexCache indexCache) {
this.mapperService = mapperService;
@ -57,7 +60,7 @@ public class TranslogRecoveryPerformer {
this.mapperAnalyzer = mapperAnalyzer;
}
protected Tuple<DocumentMapper, Boolean> docMapper(String type) {
protected Tuple<DocumentMapper, Mapping> docMapper(String type) {
return mapperService.documentMapperWithAutoCreate(type); // protected for testing
}
@ -74,6 +77,15 @@ public class TranslogRecoveryPerformer {
return numOps;
}
private void addMappingUpdate(String type, Mapping update) {
Mapping currentUpdate = recoveredTypes.get(type);
if (currentUpdate == null) {
recoveredTypes.put(type, update);
} else {
MapperUtils.merge(currentUpdate, update);
}
}
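Keyed by type, the recovered updates stay compact no matter how many replayed operations touched the same type, so each type needs only one round-trip to the master after recovery. A hedged sketch of the consuming side (the helper name is hypothetical):

for (Map.Entry<String, Mapping> entry : performer.getRecoveredTypes().entrySet()) {
    // one consolidated update per type, not one per translog operation
    updateMappingOnMaster(indexName, entry.getKey(), entry.getValue()); // hypothetical helper
}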
/**
* Performs a single recovery operation, and returns the indexing operation (or null if its not an indexing operation)
* that can then be used for mapping updates (for example) if needed.
@ -89,8 +101,8 @@ public class TranslogRecoveryPerformer {
create.version(), create.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, true, false);
mapperAnalyzer.setType(create.type()); // this is a PITA - once mappings are per index rather than per type this can go away and we can simply move this to the engine eventually :)
engine.create(engineCreate);
if (engineCreate.parsedDoc().mappingsModified()) {
recoveredTypes.add(engineCreate.type());
if (engineCreate.parsedDoc().dynamicMappingsUpdate() != null) {
addMappingUpdate(engineCreate.type(), engineCreate.parsedDoc().dynamicMappingsUpdate());
}
break;
case SAVE:
@ -100,8 +112,8 @@ public class TranslogRecoveryPerformer {
index.version(), index.versionType().versionTypeForReplicationAndRecovery(), Engine.Operation.Origin.RECOVERY, true);
mapperAnalyzer.setType(index.type());
engine.index(engineIndex);
if (engineIndex.parsedDoc().mappingsModified()) {
recoveredTypes.add(engineIndex.type());
if (engineIndex.parsedDoc().dynamicMappingsUpdate() != null) {
addMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate());
}
break;
case DELETE:
@ -150,7 +162,7 @@ public class TranslogRecoveryPerformer {
/**
* Returns the mapping updates for the types that were modified during recovery
*/
public Set<String> getRecoveredTypes() {
public Map<String, Mapping> getRecoveredTypes() {
return recoveredTypes;
}
}

View File

@ -252,7 +252,7 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
return MultiFields.getFields(index.createSearcher().getIndexReader());
}
private Fields generateTermVectorsFromDoc(TermVectorsRequest request, boolean doAllFields) throws IOException {
private Fields generateTermVectorsFromDoc(TermVectorsRequest request, boolean doAllFields) throws Throwable {
// parse the document; at the moment we do update the mapping, just like percolate does
ParsedDocument parsedDocument = parseDocument(indexShard.shardId().getIndex(), request.type(), request.doc());
@ -283,15 +283,18 @@ public class ShardTermVectorsService extends AbstractIndexShardComponent {
return generateTermVectors(getFields, request.offsets(), request.perFieldAnalyzer());
}
private ParsedDocument parseDocument(String index, String type, BytesReference doc) {
private ParsedDocument parseDocument(String index, String type, BytesReference doc) throws Throwable {
MapperService mapperService = indexShard.mapperService();
IndexService indexService = indexShard.indexService();
// TODO: make parsing not dynamically create fields not in the original mapping
Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(type);
ParsedDocument parsedDocument = docMapper.v1().parse(source(doc).type(type).flyweight(true)).setMappingsModified(docMapper);
if (parsedDocument.mappingsModified()) {
mappingUpdatedAction.updateMappingOnMaster(index, docMapper.v1(), indexService.indexUUID());
Tuple<DocumentMapper, Mapping> docMapper = mapperService.documentMapperWithAutoCreate(type);
ParsedDocument parsedDocument = docMapper.v1().parse(source(doc).type(type).flyweight(true));
if (docMapper.v2() != null) {
parsedDocument.addDynamicMappingsUpdate(docMapper.v2());
}
if (parsedDocument.dynamicMappingsUpdate() != null) {
mappingUpdatedAction.updateMappingOnMasterSynchronously(index, indexService.indexUUID(), type, parsedDocument.dynamicMappingsUpdate());
}
return parsedDocument;
}
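Note the synchronous variant: term vectors computed on an artificial document block until the master has validated and published the dynamic update, which is also why parseDocument now declares Throwable. The guard, in outline (identifiers as in the diff above):

if (parsedDocument.dynamicMappingsUpdate() != null) {
    // returns only once the master has accepted the update, or throws
    mappingUpdatedAction.updateMappingOnMasterSynchronously(
            index, indexService.indexUUID(), type, parsedDocument.dynamicMappingsUpdate());
}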

View File

@ -567,7 +567,7 @@ public class RecoverySourceHandler implements Engine.RecoveryHandler {
}
};
for (DocumentMapper documentMapper : documentMappersToUpdate) {
mappingUpdatedAction.updateMappingOnMaster(indexService.index().getName(), documentMapper, indexService.indexUUID(), listener);
mappingUpdatedAction.updateMappingOnMaster(indexService.index().getName(), indexService.indexUUID(), documentMapper.type(), documentMapper.mapping(), listener);
}
cancellableThreads.execute(new Interruptable() {
@Override

View File

@ -69,6 +69,8 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@ -280,10 +282,13 @@ public class PercolatorService extends AbstractComponent {
}
MapperService mapperService = documentIndexService.mapperService();
Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(request.documentType());
doc = docMapper.v1().parse(source(parser).type(request.documentType()).flyweight(true)).setMappingsModified(docMapper);
if (doc.mappingsModified()) {
mappingUpdatedAction.updateMappingOnMaster(request.shardId().getIndex(), docMapper.v1(), documentIndexService.indexUUID());
Tuple<DocumentMapper, Mapping> docMapper = mapperService.documentMapperWithAutoCreate(request.documentType());
doc = docMapper.v1().parse(source(parser).type(request.documentType()).flyweight(true));
if (docMapper.v2() != null) {
doc.addDynamicMappingsUpdate(docMapper.v2());
}
if (doc.dynamicMappingsUpdate() != null) {
mappingUpdatedAction.updateMappingOnMasterSynchronously(request.shardId().getIndex(), documentIndexService.indexUUID(), request.documentType(), doc.dynamicMappingsUpdate());
}
// the document parsing exits the "doc" object, so we need to set the new current field.
currentFieldName = parser.currentName();
@ -387,7 +392,7 @@ public class PercolatorService extends AbstractComponent {
try {
parser = XContentFactory.xContent(fetchedDoc).createParser(fetchedDoc);
MapperService mapperService = documentIndexService.mapperService();
Tuple<DocumentMapper, Boolean> docMapper = mapperService.documentMapperWithAutoCreate(type);
Tuple<DocumentMapper, Mapping> docMapper = mapperService.documentMapperWithAutoCreate(type);
doc = docMapper.v1().parse(source(parser).type(type).flyweight(true));
if (context.highlight() != null) {

View File

@ -19,6 +19,8 @@
package org.elasticsearch.index.engine;
import com.google.common.collect.ImmutableMap;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
@ -63,11 +65,16 @@ import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.indexing.ShardIndexingService;
import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperAnalyzer;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
@ -198,12 +205,12 @@ public class InternalEngineTests extends ElasticsearchTestCase {
}
private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, BytesReference source, boolean mappingsModified) {
private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, BytesReference source, Mapping mappingUpdate) {
Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE);
Field versionField = new NumericDocValuesField("_version", 0);
document.add(uidField);
document.add(versionField);
return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsModified);
return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
}
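Throughout the tests below the former boolean flag becomes a Mapping argument, with null meaning no dynamic update; for example:

// before: testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false)
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1,
        testDocumentWithTextField(), B_1, null); // null: no dynamic mapping update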
protected Store createStore() throws IOException {
@ -286,10 +293,10 @@ public class InternalEngineTests extends ElasticsearchTestCase {
final boolean defaultCompound = defaultSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true);
// create a doc and refresh
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
engine.create(new Engine.Create(null, newUid("2"), doc2));
engine.refresh("test");
@ -322,7 +329,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
((InternalEngine) engine).config().setCompoundOnFlush(false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
engine.create(new Engine.Create(null, newUid("3"), doc3));
engine.refresh("test");
@ -369,7 +376,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
assertThat(segments.get(1).isCompound(), equalTo(false));
((InternalEngine) engine).config().setCompoundOnFlush(true);
ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
engine.create(new Engine.Create(null, newUid("4"), doc4));
engine.refresh("test");
@ -400,7 +407,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
List<Segment> segments = engine.segments(true);
assertThat(segments.isEmpty(), equalTo(true));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
engine.refresh("test");
@ -408,10 +415,10 @@ public class InternalEngineTests extends ElasticsearchTestCase {
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).ramTree, notNullValue());
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
engine.create(new Engine.Create(null, newUid("2"), doc2));
engine.refresh("test");
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
engine.create(new Engine.Create(null, newUid("3"), doc3));
engine.refresh("test");
@ -432,7 +439,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
Translog translog = createTranslog();
Engine engine = createEngine(indexSettingsService, store, translog, mergeSchedulerProvider)) {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
engine.flush();
@ -490,7 +497,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
// create a document
Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
// its not there...
@ -529,7 +536,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null);
engine.index(new Engine.Index(null, newUid("1"), doc));
// its not updated yet...
@ -582,7 +589,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
// add it back
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
// its not there...
@ -616,7 +623,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
engine.index(new Engine.Index(null, newUid("1"), doc));
// its not updated yet...
@ -645,7 +652,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
searchResult.close();
// create a document
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
// its not there...
@ -678,7 +685,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testFailEngineOnCorruption() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
engine.flush();
final int failInPhase = randomIntBetween(1, 3);
@ -715,7 +722,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
searchResult.close();
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
engine.create(new Engine.Create(null, newUid("2"), doc2));
engine.refresh("foo");
@ -732,7 +739,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testSimpleRecover() throws Exception {
final ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
final ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
engine.flush();
@ -789,10 +796,10 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception {
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc1));
engine.flush();
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
engine.create(new Engine.Create(null, newUid("2"), doc2));
engine.recover(new Engine.RecoveryHandler() {
@ -824,10 +831,10 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception {
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc1 = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc1));
engine.flush();
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
engine.create(new Engine.Create(null, newUid("2"), doc2));
engine.recover(new Engine.RecoveryHandler() {
@ -844,7 +851,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
assertThat(create.source().toBytesArray(), equalTo(B_2));
// add for phase3
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
engine.create(new Engine.Create(null, newUid("3"), doc3));
} catch (IOException ex) {
throw new ElasticsearchException("failed", ex);
@ -870,7 +877,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(null, newUid("1"), doc);
engine.create(create);
assertThat(create.version(), equalTo(1l));
@ -882,7 +889,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testExternalVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(null, newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(12l));
@ -894,7 +901,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
@ -906,7 +913,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testExternalVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
@ -918,7 +925,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
@ -947,7 +954,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testExternalVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
@ -967,7 +974,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
@ -998,7 +1005,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testExternalVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
@ -1021,7 +1028,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
public void testForceMerge() {
int numDocs = randomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid(Integer.toString(i)), doc);
engine.index(index);
engine.refresh("test");
@ -1032,7 +1039,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
engine.forceMerge(true, 1, false, false, false);
assertEquals(engine.segments(true).size(), 1);
ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid(Integer.toString(0)), doc);
engine.delete(new Engine.Delete(index.type(), index.id(), index.uid()));
engine.forceMerge(true, 10, true, false, false); //expunge deletes
@ -1043,7 +1050,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
assertEquals(numDocs - 1, test.reader().maxDoc());
}
doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, -1, -1, testDocument(), B_1, false);
doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, -1, -1, testDocument(), B_1, null);
index = new Engine.Index(null, newUid(Integer.toString(1)), doc);
engine.delete(new Engine.Delete(index.type(), index.id(), index.uid()));
engine.forceMerge(true, 10, false, false, false); //expunge deletes
@ -1077,7 +1084,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
int numDocs = randomIntBetween(1, 20);
for (int j = 0; j < numDocs; j++) {
i++;
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid(Integer.toString(i)), doc);
engine.index(index);
}
@ -1111,7 +1118,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningDeleteConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
@ -1162,7 +1169,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningDeleteConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
@ -1219,7 +1226,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningCreateExistsException() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(null, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(1l));
@ -1235,7 +1242,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningCreateExistsExceptionWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(null, newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(1l));
@ -1253,7 +1260,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningReplicaConflict1() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
@ -1289,7 +1296,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testVersioningReplicaConflict2() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
@ -1339,7 +1346,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testBasicCreatedFlag() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertTrue(index.created());
@ -1357,7 +1364,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testCreatedFlagAfterFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(null, newUid("1"), doc);
engine.index(index);
assertTrue(index.created());
@ -1414,7 +1421,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
@ -1450,7 +1457,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
engine.create(new Engine.Create(null, newUid("1"), doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
@ -1482,7 +1489,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
Document document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null);
engine.index(new Engine.Index(null, newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
// Delete document we just added:
@ -1611,7 +1618,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOException {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
boolean canHaveDuplicates = false;
boolean autoGeneratedId = true;
@ -1650,7 +1657,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
@Test
public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() throws IOException {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
boolean canHaveDuplicates = true;
boolean autoGeneratedId = true;
@ -1703,7 +1710,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
final Engine engine = new InternalEngine(config(indexSettingsService, store, translog, createMergeScheduler(indexSettingsService)), false)) {
for (int i = 0; i < 100; i++) {
String id = Integer.toString(i);
ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocument(), B_1, false);
ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocument(), B_1, null);
engine.index(new Engine.Index(null, newUid(id), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
}
@ -1738,7 +1745,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
boolean autoGeneratedId = true;
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), false);
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Create firstIndexRequest = new Engine.Create(null, newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
engine.create(firstIndexRequest);
assertThat(firstIndexRequest.version(), equalTo(1l));
@ -1795,7 +1802,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
boolean autoGeneratedId = true;
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), false);
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Create firstIndexRequest = new Engine.Create(null, newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
engine.create(firstIndexRequest);
assertThat(firstIndexRequest.version(), equalTo(1l));
@ -1824,12 +1831,18 @@ public class InternalEngineTests extends ElasticsearchTestCase {
}
private Mapping dynamicUpdate() {
BuilderContext context = new BuilderContext(ImmutableSettings.EMPTY, new ContentPath());
final RootObjectMapper root = MapperBuilders.rootObject("some_type").build(context);
return new Mapping(root, new RootMapper[0], new Mapping.SourceTransform[0], ImmutableMap.<String, Object>of());
}
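For reference, a Mapping built this way can be fed back through the put-mapping API so that the master node validates it, the same way the migrated tests below do. A minimal sketch, assuming an index named "test" already exists and that Mapping.toString() yields valid put-mapping source (the tests below rely on exactly that):

    Mapping update = dynamicUpdate();
    // hand the serialized update to the master for validation,
    // exactly like the tests below
    client().admin().indices().preparePutMapping("test")
            .setType("some_type")
            .setSource(update.toString())
            .get();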
public void testTranslogReplay() throws IOException {
boolean canHaveDuplicates = true;
boolean autoGeneratedId = true;
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), false);
ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Create firstIndexRequest = new Engine.Create(null, newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
engine.create(firstIndexRequest);
assertThat(firstIndexRequest.version(), equalTo(1l));
@ -1847,7 +1860,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
}
TranslogHandler parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer();
parser.mappingModified = randomBoolean();
parser.mappingUpdate = dynamicUpdate();
long currentTranslogId = translog.currentId();
engine.close();
@ -1861,9 +1874,9 @@ public class InternalEngineTests extends ElasticsearchTestCase {
}
parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer();
assertEquals(numDocs, parser.recoveredOps.get());
if (parser.mappingModified) {
if (parser.mappingUpdate != null) {
assertEquals(1, parser.getRecoveredTypes().size());
assertTrue(parser.getRecoveredTypes().contains("test"));
assertTrue(parser.getRecoveredTypes().containsKey("test"));
} else {
assertEquals(0, parser.getRecoveredTypes().size());
}
@ -1880,7 +1893,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
final boolean flush = randomBoolean();
int randomId = randomIntBetween(numDocs + 1, numDocs + 10);
String uuidValue = "test#" + Integer.toString(randomId);
ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), false);
ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Create firstIndexRequest = new Engine.Create(null, newUid(uuidValue), doc, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
engine.create(firstIndexRequest);
assertThat(firstIndexRequest.version(), equalTo(1l));
@ -1888,7 +1901,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
engine.flush();
}
doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), false);
doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
Engine.Index idxRequest = new Engine.Index(null, newUid(uuidValue), doc, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime());
engine.index(idxRequest);
engine.refresh("test");
@ -1922,7 +1935,7 @@ public class InternalEngineTests extends ElasticsearchTestCase {
public static class TranslogHandler extends TranslogRecoveryPerformer {
private final DocumentMapper docMapper;
public boolean mappingModified = false;
public Mapping mappingUpdate = null;
public final AtomicInteger recoveredOps = new AtomicInteger(0);
@ -1939,8 +1952,8 @@ public class InternalEngineTests extends ElasticsearchTestCase {
}
@Override
protected Tuple<DocumentMapper, Boolean> docMapper(String type) {
return new Tuple<>(docMapper, mappingModified);
protected Tuple<DocumentMapper, Mapping> docMapper(String type) {
return new Tuple<>(docMapper, mappingUpdate);
}
@Override
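The recovery hook now hands back the pending dynamic mapping update itself instead of a boolean "modified" flag. A hypothetical caller would unpack the tuple roughly like this (a sketch, assuming Elasticsearch's Tuple accessors v1()/v2()):

    Tuple<DocumentMapper, Mapping> result = docMapper("test");
    DocumentMapper mapper = result.v1();
    Mapping pendingUpdate = result.v2(); // null when parsing introduced nothing new
    if (pendingUpdate != null) {
        // the update has to be validated and applied before the operation is replayed
    }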

@ -45,6 +45,7 @@ import org.elasticsearch.index.deletionpolicy.KeepOnlyLastDeletionPolicy;
import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
import org.elasticsearch.index.indexing.ShardIndexingService;
import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
@ -175,12 +176,12 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
}
private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, ParseContext.Document document, BytesReference source, boolean mappingsModified) {
private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, ParseContext.Document document, BytesReference source, Mapping mappingsUpdate) {
Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE);
Field versionField = new NumericDocValuesField("_version", 0);
document.add(uidField);
document.add(versionField);
return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsModified);
return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate);
}
protected Store createStore(Path p) throws IOException {
@ -276,10 +277,10 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
final boolean defaultCompound = defaultSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true);
// create a doc and refresh
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
primaryEngine.create(new Engine.Create(null, newUid("2"), doc2));
primaryEngine.refresh("test");
@ -338,7 +339,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
primaryEngine.config().setCompoundOnFlush(false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
primaryEngine.create(new Engine.Create(null, newUid("3"), doc3));
primaryEngine.refresh("test");
@ -409,7 +410,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
replicaEngine.refresh("test");
primaryEngine.config().setCompoundOnFlush(true);
ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
primaryEngine.create(new Engine.Create(null, newUid("4"), doc4));
primaryEngine.refresh("test");
@ -441,7 +442,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
List<Segment> segments = primaryEngine.segments(true);
assertThat(segments.isEmpty(), equalTo(true));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
primaryEngine.refresh("test");
@ -449,10 +450,10 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).ramTree, notNullValue());
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, false);
ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
primaryEngine.create(new Engine.Create(null, newUid("2"), doc2));
primaryEngine.refresh("test");
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, false);
ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
primaryEngine.create(new Engine.Create(null, newUid("3"), doc3));
primaryEngine.refresh("test");
@ -479,7 +480,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
// create a document
ParseContext.Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
try {
replicaEngine.create(new Engine.Create(null, newUid("1"), doc));
fail("should have thrown an exception");
@ -498,7 +499,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
// index a document
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
try {
replicaEngine.index(new Engine.Index(null, newUid("1"), doc));
fail("should have thrown an exception");
@ -517,7 +518,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
// Now, add a document to the primary so we can test shadow engine deletes
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
primaryEngine.flush();
replicaEngine.refresh("test");
@ -573,7 +574,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
// create a document
ParseContext.Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
// its not there...
@ -629,7 +630,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null);
primaryEngine.index(new Engine.Index(null, newUid("1"), doc));
// its not updated yet...
@ -700,7 +701,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
// add it back
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
// its not there...
@ -747,7 +748,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, false);
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
primaryEngine.index(new Engine.Index(null, newUid("1"), doc));
// its not updated yet...
@ -784,7 +785,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
searchResult.close();
// create a document
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
// its not there...
@ -830,7 +831,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
@Test
public void testFailEngineOnCorruption() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
primaryEngine.flush();
MockDirectoryWrapper leaf = DirectoryUtils.getLeaf(replicaEngine.config().getStore().directory(), MockDirectoryWrapper.class);
@ -869,7 +870,7 @@ public class ShadowEngineTests extends ElasticsearchTestCase {
@Test
public void testFailStart() throws IOException {
// Need a commit point for this
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, false);
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
primaryEngine.create(new Engine.Create(null, newUid("1"), doc));
primaryEngine.flush();

@ -20,8 +20,8 @@
package org.elasticsearch.index.mapper.camelcase;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -39,18 +39,22 @@ public class CamelCaseFieldNameTests extends ElasticsearchSingleNodeTest {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper documentMapper = parser.parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
DocumentMapper documentMapper = index.mapperService().documentMapper("type");
ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
.field("thisIsCamelCase", "value1")
.endObject().bytes());
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
assertThat(documentMapper.mappers().indexName("thisIsCamelCase").isEmpty(), equalTo(false));
assertThat(documentMapper.mappers().indexName("this_is_camel_case"), nullValue());
documentMapper.refreshSource();
documentMapper = parser.parse(documentMapper.mappingSource().string());
documentMapper = index.mapperService().documentMapperParser().parse(documentMapper.mappingSource().string());
assertThat(documentMapper.mappers().indexName("thisIsCamelCase").isEmpty(), equalTo(false));
assertThat(documentMapper.mappers().indexName("this_is_camel_case"), nullValue());

@ -20,17 +20,23 @@
package org.elasticsearch.index.mapper.copyto;
import com.google.common.collect.ImmutableList;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -40,7 +46,10 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.DocumentMapper.MergeFlags.mergeFlags;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
/**
*
@ -72,7 +81,9 @@ public class CopyToMapperTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type1").setSource(mapping).get();
DocumentMapper docMapper = index.mapperService().documentMapper("type1");
FieldMapper fieldMapper = docMapper.mappers().name("copy_test").mapper();
assertThat(fieldMapper, instanceOf(StringFieldMapper.class));
@ -96,7 +107,8 @@ public class CopyToMapperTests extends ElasticsearchSingleNodeTest {
.field("int_to_str_test", 42)
.endObject().bytes();
ParseContext.Document doc = docMapper.parse("type1", "1", json).rootDoc();
ParsedDocument parsedDoc = docMapper.parse("type1", "1", json);
ParseContext.Document doc = parsedDoc.rootDoc();
assertThat(doc.getFields("copy_test").length, equalTo(2));
assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
assertThat(doc.getFields("copy_test")[1].stringValue(), equalTo("bar"));
@ -115,6 +127,9 @@ public class CopyToMapperTests extends ElasticsearchSingleNodeTest {
assertThat(doc.getFields("new_field").length, equalTo(2)); // new field has doc values
assertThat(doc.getFields("new_field")[0].numericValue().intValue(), equalTo(42));
assertNotNull(parsedDoc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type1").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
fieldMapper = docMapper.mappers().name("new_field").mapper();
assertThat(fieldMapper, instanceOf(LongFieldMapper.class));
}
@ -215,11 +230,11 @@ public class CopyToMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapperAfter = parser.parse(mappingAfter);
DocumentMapper.MergeResult mergeResult = docMapperBefore.merge(docMapperAfter, mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = docMapperBefore.merge(docMapperAfter.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapperBefore.merge(docMapperAfter, mergeFlags().simulate(false));
docMapperBefore.merge(docMapperAfter.mapping(), mergeFlags().simulate(false));
fields = docMapperBefore.mappers().name("copy_test").mapper().copyTo().copyToFields();

@ -64,12 +64,12 @@ public class TokenCountFieldMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(true));
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword"));
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
assertThat(mergeResult.hasConflicts(), equalTo(false));
// Just simulated so merge hasn't happened yet
assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard"));
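The merge calls above follow the usual two-phase shape: a simulated merge first to surface conflicts without mutating anything, then the real merge. Roughly, with the same API the test uses:

    DocumentMapper.MergeResult dryRun = stage1.merge(stage2.mapping(), mergeFlags().simulate(true));
    if (dryRun.hasConflicts() == false) {
        stage1.merge(stage2.mapping(), mergeFlags().simulate(false)); // only now is stage1 mutated
    }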

@ -32,10 +32,9 @@ import org.elasticsearch.common.util.LocaleUtils;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
@ -44,12 +43,12 @@ import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.elasticsearch.test.TestSearchContext;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
@ -74,9 +73,9 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.startObject("properties").endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("date_field1", "2011/01/22")
.field("date_field2", "2011/01/22 00:00:00")
@ -85,6 +84,8 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.field("wrong_date3", "2012/test")
.endObject()
.bytes());
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test-0").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
FieldMapper<?> fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1");
assertThat(fieldMapper, instanceOf(DateFieldMapper.class));
@ -136,7 +137,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("date_field_en", "Wed, 06 Dec 2000 02:55:00 -0800")
@ -148,18 +149,18 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
assertNumericTokensEqual(doc, defaultMapper, "date_field_en", "date_field_default");
}
@Before
public void reset() {
i = 0;
}
int i = 0;
private DocumentMapper mapper(String mapping) throws IOException {
// we serialize and deserialize the mapping to make sure serialization works just fine
DocumentMapperParser parser = createIndex("test-" + (i++)).mapperService().documentMapperParser();
DocumentMapper defaultMapper = parser.parse(mapping);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
defaultMapper.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
String rebuildMapping = builder.string();
return parser.parse(rebuildMapping);
private DocumentMapper mapper(String type, String mapping) throws IOException {
final String indexName = "test-" + (i++);
IndexService index = createIndex(indexName);
client().admin().indices().preparePutMapping(indexName).setType(type).setSource(mapping).get();
return index.mapperService().documentMapper(type);
}
private void assertNumericTokensEqual(ParsedDocument doc, DocumentMapper defaultMapper, String fieldA, String fieldB) throws IOException {
@ -189,7 +190,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("date_field").field("type", "date").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
long value = System.currentTimeMillis();
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
@ -207,7 +208,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("date_field").field("type", "date").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -226,7 +227,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("date_field").field("type", "date").field("format", "HH:mm:ss").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -254,7 +255,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("date_field").field("type", "date").field("format", "MMM dd HH:mm:ss").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -285,7 +286,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -357,15 +358,15 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(initialMapping);
DocumentMapper mergeMapper = mapper(updatedMapping);
DocumentMapper defaultMapper = mapper("type", initialMapping);
DocumentMapper mergeMapper = mapper("type", updatedMapping);
assertThat(defaultMapper.mappers().name("field").mapper(), is(instanceOf(DateFieldMapper.class)));
DateFieldMapper initialDateFieldMapper = (DateFieldMapper) defaultMapper.mappers().name("field").mapper();
Map<String, String> config = getConfigurationViaXContent(initialDateFieldMapper);
assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy"));
DocumentMapper.MergeResult mergeResult = defaultMapper.merge(mergeMapper, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = defaultMapper.merge(mergeMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertThat("Merging resulting in conflicts: " + Arrays.asList(mergeResult.conflicts()), mergeResult.hasConflicts(), is(false));
assertThat(defaultMapper.mappers().name("field").mapper(), is(instanceOf(DateFieldMapper.class)));
@ -380,7 +381,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("date_field").field("type", "date").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
ParsedDocument parsedDoc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -414,7 +415,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("date_field").field("type", "date").field("format", "date_time").field("numeric_resolution", "seconds").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = mapper(mapping);
DocumentMapper defaultMapper = mapper("type", mapping);
// provided as an int
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()

@ -18,59 +18,107 @@
*/
package org.elasticsearch.index.mapper.dynamic;
import com.google.common.base.Predicate;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.mapper.StrictDynamicMappingException;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
public class DynamicMappingIntegrationTests extends ElasticsearchIntegrationTest {
// https://github.com/elasticsearch/elasticsearch/issues/8423#issuecomment-64229717
@Test
public void testStrictAllMapping() throws Exception {
String defaultMapping = jsonBuilder().startObject().startObject("_default_")
.field("dynamic", "strict")
.endObject().endObject().string();
client().admin().indices().prepareCreate("index").addMapping("_default_", defaultMapping).get();
public void testConflictingDynamicMappings() {
// we don't use indexRandom because the order of requests is important here
createIndex("index");
client().prepareIndex("index", "type", "1").setSource("foo", 3).get();
try {
client().prepareIndex("index", "type", "id").setSource("test", "test").get();
fail();
} catch (StrictDynamicMappingException ex) {
// this should not be created dynamically so we expect this exception
}
awaitBusy(new Predicate<Object>() {
@Override
public boolean apply(java.lang.Object input) {
GetMappingsResponse currentMapping = client().admin().indices().prepareGetMappings("index").get();
return currentMapping.getMappings().get("index").get("type") != null;
}
});
String docMapping = jsonBuilder().startObject().startObject("type")
.startObject("_all")
.field("enabled", false)
.endObject()
.endObject().endObject().string();
try {
client().admin().indices()
.preparePutMapping("index")
.setType("type")
.setSource(docMapping).get();
fail();
} catch (Exception e) {
// the mapping was created anyway with _all enabled: true, although the index request fails so we expect the update to fail
}
// make sure type was created
for (Client client : cluster()) {
GetMappingsResponse mapping = client.admin().indices().prepareGetMappings("index").setLocal(true).get();
assertNotNull(mapping.getMappings().get("index").get("type"));
client().prepareIndex("index", "type", "2").setSource("foo", "bar").get();
fail("Indexing request should have failed!");
} catch (MapperParsingException e) {
// expected
}
}
public void testConflictingDynamicMappingsBulk() {
// we don't use indexRandom because the order of requests is important here
createIndex("index");
client().prepareIndex("index", "type", "1").setSource("foo", 3).get();
BulkResponse bulkResponse = client().prepareBulk().add(client().prepareIndex("index", "type", "1").setSource("foo", 3)).get();
assertFalse(bulkResponse.hasFailures());
bulkResponse = client().prepareBulk().add(client().prepareIndex("index", "type", "2").setSource("foo", "bar")).get();
assertTrue(bulkResponse.hasFailures());
}
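Because the master now validates the update, the conflicting document fails as an individual bulk item rather than failing the whole request. Inspecting the failures might look like the following sketch (assuming the test's inherited logger; BulkResponse is iterable over its items):

    for (BulkItemResponse item : bulkResponse) {
        if (item.isFailed()) {
            // e.g. "bar" cannot be merged into the long mapping picked for "foo"
            logger.info("item [{}] failed: {}", item.getId(), item.getFailureMessage());
        }
    }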
private static void assertMappingsHaveField(GetMappingsResponse mappings, String index, String type, String field) throws IOException {
ImmutableOpenMap<String, MappingMetaData> indexMappings = mappings.getMappings().get("index");
assertNotNull(indexMappings);
MappingMetaData typeMappings = indexMappings.get(type);
assertNotNull(typeMappings);
Map<String, Object> typeMappingsMap = typeMappings.getSourceAsMap();
Map<String, Object> properties = (Map<String, Object>) typeMappingsMap.get("properties");
assertTrue("Could not find [" + field + "] in " + typeMappingsMap.toString(), properties.containsKey(field));
}
public void testMappingsPropagatedToMasterNodeImmediately() throws IOException {
createIndex("index");
// works when the type has been dynamically created
client().prepareIndex("index", "type", "1").setSource("foo", 3).get();
GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").setTypes("type").get();
assertMappingsHaveField(mappings, "index", "type", "foo");
// works if the type already existed
client().prepareIndex("index", "type", "1").setSource("bar", "baz").get();
mappings = client().admin().indices().prepareGetMappings("index").setTypes("type").get();
assertMappingsHaveField(mappings, "index", "type", "bar");
// works if we indexed an empty document
client().prepareIndex("index", "type2", "1").setSource().get();
mappings = client().admin().indices().prepareGetMappings("index").setTypes("type2").get();
assertTrue(mappings.getMappings().get("index").toString(), mappings.getMappings().get("index").containsKey("type2"));
}
public void testConcurrentDynamicUpdates() throws Throwable {
createIndex("index");
final Thread[] indexThreads = new Thread[32];
final CountDownLatch startLatch = new CountDownLatch(1);
final AtomicReference<Throwable> error = new AtomicReference<>();
for (int i = 0; i < indexThreads.length; ++i) {
final String id = Integer.toString(i);
indexThreads[i] = new Thread(new Runnable() {
@Override
public void run() {
try {
startLatch.await();
assertTrue(client().prepareIndex("index", "type", id).setSource("field" + id, "bar").get().isCreated());
} catch (Throwable t) {
error.compareAndSet(null, t);
}
}
});
indexThreads[i].start();
}
startLatch.countDown();
for (Thread thread : indexThreads) {
thread.join();
}
if (error.get() != null) {
throw error.get();
}
Thread.sleep(2000);
GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").setTypes("type").get();
for (int i = 0; i < indexThreads.length; ++i) {
assertMappingsHaveField(mappings, "index", "type", "field" + i);
}
for (int i = 0; i < indexThreads.length; ++i) {
assertTrue(client().prepareGet("index", "type", Integer.toString(i)).get().isExists());
}
}
}
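One caveat in testConcurrentDynamicUpdates: the fixed Thread.sleep(2000) merely gives the updated mappings time to publish to the cluster state. A polling wait such as awaitBusy, used elsewhere in this change, would state that intent directly. A sketch, assuming the type shows up in the mappings once publication completes:

    awaitBusy(new Predicate<Object>() {
        @Override
        public boolean apply(Object input) {
            GetMappingsResponse mappings = client().admin().indices().prepareGetMappings("index").setTypes("type").get();
            return mappings.getMappings().get("index").get("type") != null;
        }
    });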

@ -18,12 +18,10 @@
*/
package org.elasticsearch.index.mapper.dynamic;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
@ -38,7 +36,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
@ -46,8 +43,6 @@ import org.elasticsearch.index.mapper.StrictDynamicMappingException;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
@ -184,7 +179,7 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
assertTrue(mappers != null && mappers.isEmpty() == false);
}
public void testIndexingFailureDoesStillCreateType() throws IOException, InterruptedException {
public void testTypeNotCreatedOnIndexFailure() throws IOException, InterruptedException {
XContentBuilder mapping = jsonBuilder().startObject().startObject("_default_")
.field("dynamic", "strict")
.endObject().endObject();
@ -197,120 +192,9 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
} catch (StrictDynamicMappingException e) {
}
awaitBusy(new Predicate<Object>() {
@Override
public boolean apply(java.lang.Object input) {
GetMappingsResponse currentMapping = client().admin().indices().prepareGetMappings("test").get();
return currentMapping.getMappings().get("test").get("type") != null;
}
});
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
assertNotNull(getMappingsResponse.getMappings().get("test").get("type"));
DocumentMapper mapper = indexService.mapperService().documentMapper("type");
assertNotNull(mapper);
}
public void testTypeCreatedProperly() throws IOException, InterruptedException {
XContentBuilder mapping = jsonBuilder().startObject().startObject("_default_")
.field("dynamic", "strict")
.startObject("properties")
.startObject("test_string")
.field("type", "string")
.endObject()
.endObject()
.endObject().endObject();
IndexService indexService = createIndex("test", ImmutableSettings.EMPTY, "_default_", mapping);
try {
client().prepareIndex().setIndex("test").setType("type").setSource(jsonBuilder().startObject().field("test", "test").endObject()).get();
fail();
} catch (StrictDynamicMappingException e) {
}
awaitBusy(new Predicate<Object>() {
@Override
public boolean apply(java.lang.Object input) {
GetMappingsResponse currentMapping = client().admin().indices().prepareGetMappings("test").get();
return currentMapping.getMappings().get("test").get("type") != null;
}
});
//type should be in mapping
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
assertNotNull(getMappingsResponse.getMappings().get("test").get("type"));
client().prepareIndex().setIndex("test").setType("type").setSource(jsonBuilder().startObject().field("test_string", "test").endObject()).get();
client().admin().indices().prepareRefresh("test").get();
assertThat(client().prepareSearch("test").get().getHits().getTotalHits(), equalTo(1l));
DocumentMapper mapper = indexService.mapperService().documentMapper("type");
assertNotNull(mapper);
getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
assertNotNull(getMappingsResponse.getMappings().get("test").get("type"));
}
public void testFieldsCreatedWithPartialParsing() throws IOException, InterruptedException {
XContentBuilder mapping = jsonBuilder().startObject().startObject("doc")
.startObject("properties")
.startObject("z")
.field("type", "long")
.endObject()
.endObject()
.endObject().endObject();
IndexService indexService = createIndex("test", ImmutableSettings.EMPTY, "doc", mapping);
boolean create = randomBoolean();
if (create == false) {
// we want to test sometimes create and sometimes index so sometimes add the document before and sometimes not
client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource(jsonBuilder().startObject().field("z", 0).endObject()).get();
}
try {
IndexRequestBuilder indexRequest = client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource(jsonBuilder().startObject().field("a", "string").field("z", "string").endObject());
indexRequest.setCreate(create);
indexRequest.get();
fail();
} catch (MapperParsingException e) {
// this should fail because the field z is of type long
}
//type should be in mapping
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
assertNotNull(getMappingsResponse.getMappings().get("test").get("doc"));
client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource(jsonBuilder().startObject().field("a", "string").field("z", 0).endObject()).get();
client().admin().indices().prepareRefresh("test").get();
assertThat(client().prepareSearch("test").get().getHits().getTotalHits(), equalTo(1l));
// both fields should be in local mapper
DocumentMapper mapper = indexService.mapperService().documentMapper("doc");
assertNotNull(mapper.mappers().name("a"));
assertNotNull(mapper.mappers().name("z"));
// we have to wait here because the cluster state might not be immediately updated
assertTrue(awaitBusy(new Predicate<Object>() {
@Override
public boolean apply(java.lang.Object input) {
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
return getMappingsResponse.getMappings().get("test").get("doc") != null;
}
}));
assertTrue(awaitBusy(new Predicate<Object>() {
@Override
public boolean apply(java.lang.Object input) {
// both fields should be in the cluster state
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
assertNotNull(getMappingsResponse.getMappings().get("test").get("doc"));
Map<String, Object> mappings = null;
try {
mappings = getMappingsResponse.getMappings().get("test").get("doc").getSourceAsMap();
} catch (IOException e) {
fail("IOException when calling getSourceAsMap()" + e.getMessage());
}
return ((LinkedHashMap) mappings.get("properties")).get("a") != null && ((LinkedHashMap) mappings.get("properties")).get("z") != null;
}
}));
assertNull(getMappingsResponse.getMappings().get("test").get("type"));
}
private String serialize(ToXContent mapper) throws Exception {
@ -345,8 +229,8 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
public void testField() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
@ -386,8 +270,8 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
public void testIntroduceTwoFields() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
@ -407,8 +291,8 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
public void testObject() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
@ -427,8 +311,8 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
public void testArray() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
@ -467,8 +351,8 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
public void testComplexArray() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("type").endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);

@ -21,9 +21,11 @@ package org.elasticsearch.index.mapper.dynamictemplate.genericstore;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -39,9 +41,13 @@ public class GenericStoreDynamicTemplateTests extends ElasticsearchSingleNodeTes
@Test
public void testSimple() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping).get();
DocumentMapper docMapper = index.mapperService().documentMapper("person");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/genericstore/test-data.json");
Document doc = docMapper.parse(new BytesArray(json)).rootDoc();
ParsedDocument parsedDoc = docMapper.parse(new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
assertThat(f.name(), equalTo("name"));

@ -21,8 +21,11 @@ package org.elasticsearch.index.mapper.dynamictemplate.pathmatch;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -39,9 +42,13 @@ public class PathMatchDynamicTemplateTests extends ElasticsearchSingleNodeTest {
@Test
public void testSimple() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping).get();
DocumentMapper docMapper = index.mapperService().documentMapper("person");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/pathmatch/test-data.json");
Document doc = docMapper.parse(new BytesArray(json)).rootDoc();
ParsedDocument parsedDoc = docMapper.parse(new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
assertThat(f.name(), equalTo("name"));

@ -24,6 +24,7 @@ import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
@ -46,10 +47,13 @@ public class SimpleDynamicTemplatesTests extends ElasticsearchSingleNodeTest {
.field("match_mapping_type", "string")
.startObject("mapping").field("index", "no").endObject()
.endObject().endObject().endArray().endObject().endObject();
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(builder.string());
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("person").setSource(builder.string()).get();
DocumentMapper docMapper = index.mapperService().documentMapper("person");
builder = JsonXContent.contentBuilder();
builder.startObject().field("_id", "1").field("s", "hello").field("l", 1).endObject();
docMapper.parse(builder.bytes());
ParsedDocument parsedDoc = docMapper.parse(builder.bytes());
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
DocumentFieldMappers mappers = docMapper.mappers();
@ -66,9 +70,13 @@ public class SimpleDynamicTemplatesTests extends ElasticsearchSingleNodeTest {
@Test
public void testSimple() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping).get();
DocumentMapper docMapper = index.mapperService().documentMapper("person");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
Document doc = docMapper.parse(new BytesArray(json)).rootDoc();
ParsedDocument parsedDoc = docMapper.parse(new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
assertThat(f.name(), equalTo("name"));
@ -119,13 +127,13 @@ public class SimpleDynamicTemplatesTests extends ElasticsearchSingleNodeTest {
@Test
public void testSimpleWithXContentTraverse() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json");
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper docMapper = parser.parse(mapping);
docMapper.refreshSource();
docMapper = parser.parse(docMapper.mappingSource().string());
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("person").setSource(mapping).get();
DocumentMapper docMapper = index.mapperService().documentMapper("person");
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/dynamictemplate/simple/test-data.json");
Document doc = docMapper.parse(new BytesArray(json)).rootDoc();
ParsedDocument parsedDoc = docMapper.parse(new BytesArray(json));
client().admin().indices().preparePutMapping("test").setType("person").setSource(parsedDoc.dynamicMappingsUpdate().toString()).get();
Document doc = parsedDoc.rootDoc();
IndexableField f = doc.getField("name");
assertThat(f.name(), equalTo("name"));

View File

@ -486,7 +486,7 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts().length, equalTo(2));
// todo better way of checking conflict?
@ -498,7 +498,7 @@ public class GeoPointFieldMapperTests extends ElasticsearchSingleNodeTest {
.field("validate", true).field("normalize", true).endObject().endObject()
.endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
assertThat(mergeResult.hasConflicts(), equalTo(false));
}
}
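The other change that recurs below is the merge signature: DocumentMapper.merge now takes the type-level Mapping, unwrapped via DocumentMapper.mapping(), rather than another DocumentMapper. A short sketch against the stage1/stage2 mappers from the test above:

    // before this commit, tests merged one DocumentMapper into another:
    //   DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
    // now merge operates on the Mapping, so callers unwrap it first:
    DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
    assertThat(mergeResult.hasConflicts(), equalTo(true)); // conflict reporting itself is unchanged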

View File

@ -311,7 +311,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
.field("orientation", "cw").endObject().endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
// check correct conflicts
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts().length, equalTo(3));
@ -338,7 +338,7 @@ public class GeoShapeFieldMapperTests extends ElasticsearchSingleNodeTest {
.startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
.field("distance_error_pct", 0.001).field("orientation", "cw").endObject().endObject().endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
// verify mapping changes, and ensure no failures
assertThat(mergeResult.hasConflicts(), equalTo(false));

View File

@ -102,7 +102,7 @@ public class IndexTypeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper mapperDisabled = parser.parse(mappingWithIndexDisabled);
mapperEnabled.merge(mapperDisabled, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mapperEnabled.merge(mapperDisabled.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertThat(mapperEnabled.IndexFieldMapper().enabled(), is(false));
}
@ -118,7 +118,7 @@ public class IndexTypeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
enabledMapper.merge(disabledMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertThat(enabledMapper.indexMapper().enabled(), is(false));
}

View File

@ -162,11 +162,11 @@ public class FieldNamesFieldMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper mapperEnabled = parser.parse(enabledMapping);
DocumentMapper mapperDisabled = parser.parse(disabledMapping);
mapperEnabled.merge(mapperDisabled, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mapperEnabled.merge(mapperDisabled.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertFalse(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).enabled());
mapperEnabled = parser.parse(enabledMapping);
mapperDisabled.merge(mapperEnabled, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mapperDisabled.merge(mapperEnabled.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertTrue(mapperEnabled.rootMapper(FieldNamesFieldMapper.class).enabled());
}
}

View File

@ -24,9 +24,12 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
@ -46,7 +49,9 @@ public class DoubleIndexingDocTest extends ElasticsearchSingleNodeTest {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject()
.endObject().endObject().string();
DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
DocumentMapper mapper = index.mapperService().documentMapper("type");
ParsedDocument doc = mapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -57,6 +62,8 @@ public class DoubleIndexingDocTest extends ElasticsearchSingleNodeTest {
.startArray("field5").value(1).value(2).value(3).endArray()
.endObject()
.bytes());
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
writer.addDocument(doc.rootDoc());
writer.addDocument(doc.rootDoc());

View File

@ -51,13 +51,13 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(true));
assertThat(mergeResult.hasConflicts(), equalTo(false));
// since we are simulating, we should not have the age mapping
assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue());
assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue());
// now merge, don't simulate
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
mergeResult = stage1.merge(stage2.mapping(), mergeFlags().simulate(false));
// there are still no merge failures
assertThat(mergeResult.hasConflicts(), equalTo(false));
// but we have the age in
@ -76,7 +76,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper withDynamicMapper = parser.parse(withDynamicMapping);
assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
DocumentMapper.MergeResult mergeResult = mapper.merge(withDynamicMapper, mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = mapper.merge(withDynamicMapper.mapping(), mergeFlags().simulate(false));
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(mapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
}
@ -93,12 +93,12 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().endObject().string();
DocumentMapper nestedMapper = parser.parse(nestedMapping);
DocumentMapper.MergeResult mergeResult = objectMapper.merge(nestedMapper, mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = objectMapper.merge(nestedMapper.mapping(), mergeFlags().simulate(true));
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts().length, equalTo(1));
assertThat(mergeResult.conflicts()[0], equalTo("object mapping [obj] can't be changed from non-nested to nested"));
mergeResult = nestedMapper.merge(objectMapper, mergeFlags().simulate(true));
mergeResult = nestedMapper.merge(objectMapper.mapping(), mergeFlags().simulate(true));
assertThat(mergeResult.conflicts().length, equalTo(1));
assertThat(mergeResult.conflicts()[0], equalTo("object mapping [obj] can't be changed from nested to non-nested"));
}
@ -117,7 +117,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper changed = parser.parse(mapping2);
assertThat(((NamedAnalyzer) existing.mappers().name("field").mapper().searchAnalyzer()).name(), equalTo("whitespace"));
DocumentMapper.MergeResult mergeResult = existing.merge(changed, mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = existing.merge(changed.mapping(), mergeFlags().simulate(false));
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().name("field").mapper().searchAnalyzer()).name(), equalTo("keyword"));
@ -137,7 +137,7 @@ public class TestMergeMapperTests extends ElasticsearchSingleNodeTest {
DocumentMapper changed = parser.parse(mapping2);
assertThat(((NamedAnalyzer) existing.mappers().name("field").mapper().searchAnalyzer()).name(), equalTo("whitespace"));
DocumentMapper.MergeResult mergeResult = existing.merge(changed, mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = existing.merge(changed.mapping(), mergeFlags().simulate(false));
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(((NamedAnalyzer) existing.mappers().name("field").mapper().searchAnalyzer()).name(), equalTo("standard"));

View File

@ -62,10 +62,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping2.json");
DocumentMapper docMapper2 = parser.parse(mapping);
DocumentMapper.MergeResult mergeResult = docMapper.merge(docMapper2, mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2, mergeFlags().simulate(false));
docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(false));
assertNotSame(IndexOptions.NONE, docMapper.mappers().name("name").mapper().fieldType().indexOptions());
@ -85,10 +85,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/test-mapping3.json");
DocumentMapper docMapper3 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper3, mergeFlags().simulate(true));
mergeResult = docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3, mergeFlags().simulate(false));
docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(false));
assertNotSame(IndexOptions.NONE, docMapper.mappers().name("name").mapper().fieldType().indexOptions());
@ -103,10 +103,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapper4 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper4, mergeFlags().simulate(true));
mergeResult = docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper4, mergeFlags().simulate(false));
docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(false));
assertNotSame(IndexOptions.NONE, docMapper.mappers().name("name").mapper().fieldType().indexOptions());
@ -138,10 +138,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade1.json");
DocumentMapper docMapper2 = parser.parse(mapping);
DocumentMapper.MergeResult mergeResult = docMapper.merge(docMapper2, mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper2, mergeFlags().simulate(false));
docMapper.merge(docMapper2.mapping(), mergeFlags().simulate(false));
assertNotSame(IndexOptions.NONE, docMapper.mappers().name("name").mapper().fieldType().indexOptions());
@ -161,10 +161,10 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade2.json");
DocumentMapper docMapper3 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper3, mergeFlags().simulate(true));
mergeResult = docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(false));
docMapper.merge(docMapper3, mergeFlags().simulate(false));
docMapper.merge(docMapper3.mapping(), mergeFlags().simulate(false));
assertNotSame(IndexOptions.NONE, docMapper.mappers().name("name").mapper().fieldType().indexOptions());
@ -177,12 +177,12 @@ public class JavaMultiFieldMergeTests extends ElasticsearchSingleNodeTest {
mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/merge/upgrade3.json");
DocumentMapper docMapper4 = parser.parse(mapping);
mergeResult = docMapper.merge(docMapper4, mergeFlags().simulate(true));
mergeResult = docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(true));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.conflicts()[0], equalTo("mapper [name] has different index values"));
assertThat(mergeResult.conflicts()[1], equalTo("mapper [name] has different store values"));
mergeResult = docMapper.merge(docMapper4, mergeFlags().simulate(false));
mergeResult = docMapper.merge(docMapper4.mapping(), mergeFlags().simulate(false));
assertThat(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts(), equalTo(true));
assertNotSame(IndexOptions.NONE, docMapper.mappers().name("name").mapper().fieldType().indexOptions());

View File

@ -26,8 +26,13 @@ import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@ -39,7 +44,10 @@ import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.hamcrest.Matchers.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
/**
*/
@ -51,7 +59,9 @@ public class SimpleNumericTests extends ElasticsearchSingleNodeTest {
.field("numeric_detection", true)
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -59,6 +69,8 @@ public class SimpleNumericTests extends ElasticsearchSingleNodeTest {
.field("s_double", "100.0")
.endObject()
.bytes());
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get();
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
assertThat(mapper, instanceOf(LongFieldMapper.class));
@ -72,7 +84,9 @@ public class SimpleNumericTests extends ElasticsearchSingleNodeTest {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get();
DocumentMapper defaultMapper = index.mapperService().documentMapper("type");
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -80,6 +94,8 @@ public class SimpleNumericTests extends ElasticsearchSingleNodeTest {
.field("s_double", "100.0")
.endObject()
.bytes());
assertNotNull(doc.dynamicMappingsUpdate());
assertAcked(client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get());
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long");
assertThat(mapper, instanceOf(StringFieldMapper.class));

View File

@ -114,7 +114,7 @@ public class SizeMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
enabledMapper.merge(disabledMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertThat(enabledMapper.SizeFieldMapper().enabled(), is(false));
}
}

View File

@ -499,7 +499,7 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject()
.endObject().endObject().endObject().endObject().string();
MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping), MergeFlags.mergeFlags().simulate(false));
MergeResult mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), MergeFlags.mergeFlags().simulate(false));
assertFalse(Arrays.toString(mergeResult.conflicts()), mergeResult.hasConflicts());
doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
@ -514,7 +514,7 @@ public class SimpleStringMappingTests extends ElasticsearchSingleNodeTest {
updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject()
.endObject().endObject().endObject().endObject().string();
mergeResult = defaultMapper.merge(parser.parse(updatedMapping), MergeFlags.mergeFlags());
mergeResult = defaultMapper.merge(parser.parse(updatedMapping).mapping(), MergeFlags.mergeFlags());
assertTrue(mergeResult.hasConflicts());
assertEquals(1, mergeResult.conflicts().length);
assertTrue(mergeResult.conflicts()[0].contains("cannot enable norms"));

View File

@ -141,7 +141,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject().endObject().string();
DocumentMapper disabledMapper = parser.parse(disabledMapping);
enabledMapper.merge(disabledMapper, DocumentMapper.MergeFlags.mergeFlags().simulate(false));
enabledMapper.merge(disabledMapper.mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertThat(enabledMapper.timestampFieldMapper().enabled(), is(false));
}
@ -502,7 +502,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.startObject("_timestamp").field("enabled", randomBoolean()).startObject("fielddata").field("loading", "eager").field("format", "array").endObject().field("store", "yes").endObject()
.endObject().endObject().string();
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertThat(mergeResult.conflicts().length, equalTo(0));
assertThat(docMapper.timestampFieldMapper().fieldDataType().getLoading(), equalTo(FieldMapper.Loading.EAGER));
assertThat(docMapper.timestampFieldMapper().fieldDataType().getFormat(indexSettings), equalTo("array"));
@ -518,8 +518,6 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.field("default", "1970-01-01")
.startObject("fielddata").field("format", "doc_values").endObject()
.endObject()
.startObject("properties")
.endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
@ -578,7 +576,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
String[] expectedConflicts = {"mapper [_timestamp] has different index values", "mapper [_timestamp] has different store values", "Cannot update default in _timestamp value. Value is 1970-01-01 now encountering 1970-01-02", "Cannot update path in _timestamp value. Value is foo path in merged mapping is bar", "mapper [_timestamp] has different tokenize values"};
for (String conflict : mergeResult.conflicts()) {
@ -612,7 +610,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
.endObject()
.endObject().endObject().string();
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
List<String> expectedConflicts = new ArrayList<>();
expectedConflicts.add("mapper [_timestamp] has different index values");
expectedConflicts.add("mapper [_timestamp] has different tokenize values");
@ -673,7 +671,7 @@ public class TimestampMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper.refreshSource();
docMapper = parser.parse(docMapper.mappingSource().string());
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping2), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
assertThat(mergeResult.conflicts().length, equalTo(conflict == null ? 0:1));
if (conflict != null) {
assertThat(mergeResult.conflicts()[0], containsString(conflict));

View File

@ -117,7 +117,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper mapperWithTtl = parser.parse(mappingWithTtl);
DocumentMapper.MergeFlags mergeFlags = DocumentMapper.MergeFlags.mergeFlags().simulate(false);
DocumentMapper.MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl, mergeFlags);
DocumentMapper.MergeResult mergeResult = mapperWithoutTtl.merge(mapperWithTtl.mapping(), mergeFlags);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(true));
@ -144,7 +144,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper updatedMapper = parser.parse(updatedMapping);
DocumentMapper.MergeFlags mergeFlags = DocumentMapper.MergeFlags.mergeFlags().simulate(false);
DocumentMapper.MergeResult mergeResult = initialMapper.merge(updatedMapper, mergeFlags);
DocumentMapper.MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), mergeFlags);
assertThat(mergeResult.hasConflicts(), equalTo(false));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@ -159,7 +159,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
DocumentMapper updatedMapper = parser.parse(mappingWithTtlDisabled);
DocumentMapper.MergeFlags mergeFlags = DocumentMapper.MergeFlags.mergeFlags().simulate(true);
DocumentMapper.MergeResult mergeResult = initialMapper.merge(updatedMapper, mergeFlags);
DocumentMapper.MergeResult mergeResult = initialMapper.merge(updatedMapper.mapping(), mergeFlags);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true));
@ -197,7 +197,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndDisabledLater() throws Exception {
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDisabled.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(randomBoolean()));
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDisabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(randomBoolean()));
assertFalse(mergeResult.hasConflicts());
}
@ -205,7 +205,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
public void testNoConflictIfNothingSetAndEnabledLater() throws Exception {
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type");
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(randomBoolean()));
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(randomBoolean()));
assertFalse(mergeResult.hasConflicts());
}
@ -214,7 +214,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithTtlEnabled);
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertFalse(mergeResult.hasConflicts());
CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":true,\"default\":360000},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@ -227,7 +227,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
CompressedString mappingAfterCreation = indexService.mapperService().documentMapper("type").refreshSource();
assertThat(mappingAfterCreation, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithOnlyDefaultSet.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertFalse(mergeResult.hasConflicts());
CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
assertThat(mappingAfterMerge, equalTo(new CompressedString("{\"type\":{\"_ttl\":{\"enabled\":false},\"properties\":{\"field\":{\"type\":\"string\"}}}}")));
@ -241,7 +241,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
IndexService indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithTtl);
CompressedString mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlDifferentDefault = getMappingWithTtlEnabled("7d");
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDifferentDefault.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlDifferentDefault.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
CompressedString mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -253,7 +253,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled();
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -265,7 +265,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingBeforeMerge = indexService.mapperService().documentMapper("type").mappingSource();
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - no mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -276,7 +276,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
mappingWithoutTtl = getMappingWithTtlDisabled("6d");
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type", mappingWithoutTtl);
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();
@ -286,7 +286,7 @@ public class TTLMappingTests extends ElasticsearchSingleNodeTest {
// check if switching simulate flag off works if nothing was applied in the beginning
indexService = createIndex("testindex", ImmutableSettings.settingsBuilder().build(), "type");
mappingWithTtlEnabled = getMappingWithTtlEnabled("7d");
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingWithTtlEnabled.string()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
assertFalse(mergeResult.hasConflicts());
// make sure simulate flag actually worked - mappings applied
mappingAfterMerge = indexService.mapperService().documentMapper("type").refreshSource();

View File

@ -79,7 +79,7 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
private void testNoConflictWhileMergingAndMappingChanged(XContentBuilder mapping, XContentBuilder mappingUpdate, XContentBuilder expectedMapping) throws IOException {
IndexService indexService = createIndex("test", ImmutableSettings.settingsBuilder().build(), "type", mapping);
// simulate like in MetaDataMappingService#putMapping
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(false));
// ensure we have no conflicts
assertThat(mergeResult.conflicts().length, equalTo(0));
// make sure mappings applied
@ -103,7 +103,7 @@ public class UpdateMappingTests extends ElasticsearchSingleNodeTest {
IndexService indexService = createIndex("test", ImmutableSettings.settingsBuilder().build(), "type", mapping);
CompressedString mappingBeforeUpdate = indexService.mapperService().documentMapper("type").mappingSource();
// simulate like in MetaDataMappingService#putMapping
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
DocumentMapper.MergeResult mergeResult = indexService.mapperService().documentMapper("type").merge(indexService.mapperService().parse("type", new CompressedString(mappingUpdate.bytes()), true).mapping(), DocumentMapper.MergeFlags.mergeFlags().simulate(true));
// ensure we have conflicts
assertThat(mergeResult.conflicts().length, equalTo(1));
// make sure simulate flag actually worked - no mappings applied

View File

@ -1 +1 @@
{"type":{"_timestamp":{"enabled":false},"_index":{"enabled":false},"_size":{"enabled":false},"properties":{}}}
{"type":{"_timestamp":{"enabled":false},"_index":{"enabled":false},"_size":{"enabled":false}}}

View File

@ -25,8 +25,9 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.elasticsearch.test.TestSearchContext;
@ -57,7 +58,9 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSingleNodeTest {
MapperService mapperService = indexService.mapperService();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedString(mapping), true);
mapperService.documentMapper("person").parse(new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
ParsedDocument doc = mapperService.documentMapper("person").parse(new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
queryParser = injector.getInstance(IndexQueryParserService.class);
}

View File

@ -25,8 +25,9 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.elasticsearch.test.TestSearchContext;
@ -38,7 +39,9 @@ import java.io.IOException;
import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
*
@ -56,7 +59,9 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ElasticsearchSingleNodeTest {
MapperService mapperService = indexService.mapperService();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedString(mapping), true);
mapperService.documentMapper("person").parse(new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
ParsedDocument doc = mapperService.documentMapper("person").parse(new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
queryParser = injector.getInstance(IndexQueryParserService.class);
}

View File

@ -23,11 +23,43 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.index.*;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.queries.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.spans.*;
import org.apache.lucene.queries.BoostingQuery;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.queries.FilterClause;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.queries.TermsFilter;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.NumericRangeFilter;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.PrefixFilter;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanFirstQuery;
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanNotQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.spatial.prefix.IntersectsPrefixTreeFilter;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
@ -36,11 +68,22 @@ import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.termvectors.*;
import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.lucene.search.*;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.lucene.search.MatchAllDocsFilter;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
import org.elasticsearch.common.lucene.search.NotFilter;
import org.elasticsearch.common.lucene.search.OrFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.RegexpFilter;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.lucene.search.function.BoostScoreFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
@ -51,7 +94,9 @@ import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
@ -60,7 +105,6 @@ import org.elasticsearch.index.search.geo.GeoDistanceFilter;
import org.elasticsearch.index.search.geo.GeoPolygonFilter;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxFilter;
import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.hamcrest.Matchers;
@ -76,12 +120,50 @@ import java.util.List;
import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.FilterBuilders.*;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.elasticsearch.index.query.RegexpFlag.*;
import static org.elasticsearch.index.query.FilterBuilders.andFilter;
import static org.elasticsearch.index.query.FilterBuilders.boolFilter;
import static org.elasticsearch.index.query.FilterBuilders.notFilter;
import static org.elasticsearch.index.query.FilterBuilders.orFilter;
import static org.elasticsearch.index.query.FilterBuilders.prefixFilter;
import static org.elasticsearch.index.query.FilterBuilders.queryFilter;
import static org.elasticsearch.index.query.FilterBuilders.rangeFilter;
import static org.elasticsearch.index.query.FilterBuilders.termFilter;
import static org.elasticsearch.index.query.FilterBuilders.termsFilter;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.boostingQuery;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.disMaxQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.elasticsearch.index.query.QueryBuilders.regexpQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanFirstQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanOrQuery;
import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.termsQuery;
import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery;
import static org.elasticsearch.index.query.RegexpFlag.COMPLEMENT;
import static org.elasticsearch.index.query.RegexpFlag.EMPTY;
import static org.elasticsearch.index.query.RegexpFlag.INTERSECTION;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.factorFunction;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
/**
*
@ -101,7 +183,9 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedString(mapping), true);
mapperService.documentMapper("person").parse(new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
ParsedDocument doc = mapperService.documentMapper("person").parse(new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
queryParser = indexService.queryParserService();
}

View File

@ -25,10 +25,6 @@ import com.google.common.collect.Sets;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

View File

@ -1645,7 +1645,7 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
.endObject().endObject()).get();
fail();
} catch (MergeMappingException e) {
assertThat(e.getMessage(), equalTo("Merge failed with failures {[The _parent field's type option can't be changed]}"));
assertThat(e.getMessage(), equalTo("Merge failed with failures {[The _parent field's type option can't be changed: [null]->[parent]]}"));
}
}

View File

@ -441,7 +441,7 @@ public final class InternalTestCluster extends TestCluster {
}
if (random.nextBoolean()) {
builder.put(MappingUpdatedAction.INDICES_MAPPING_ADDITIONAL_MAPPING_CHANGE_TIME, RandomInts.randomIntBetween(random, 0, 500) /*milliseconds*/);
builder.put(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, new TimeValue(RandomInts.randomIntBetween(random, 10, 30), TimeUnit.SECONDS));
}
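Because dynamic mapping updates are now synchronous, the randomized test cluster replaces the old additional-mapping-change-time knob with a dynamic-mapping timeout. A sketch of setting it explicitly on a node, reusing the TimeValue/TimeUnit imports and the 10-30 second range randomized above:

    ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder();
    // bound how long an indexing request may wait for the master to apply a dynamic update
    builder.put(MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT, new TimeValue(30, TimeUnit.SECONDS));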
if (random.nextInt(10) == 0) {