more mapping work, ignoreDuplicates renamed to ignoreConflicts

kimchy 2010-03-01 13:55:47 +02:00
parent 568254887f
commit 0ef4000842
10 changed files with 77 additions and 61 deletions

View File

@@ -39,7 +39,7 @@ import static org.elasticsearch.util.TimeValue.*;
* {@link org.elasticsearch.client.Requests#putMappingRequest(String...)}.
*
* <p>If a mapping already exists for the type, the new mapping will be merged with the existing one. If elements
* that can't be merged are detected, the request will be rejected unless {@link #ignoreDuplicates(boolean)}
* that can't be merged are detected, the request will be rejected unless {@link #ignoreConflicts(boolean)}
* is set, in which case the conflicting parts of the mapping are ignored and the rest is merged.
*
* @author kimchy (shay.banon)
@@ -57,7 +57,7 @@ public class PutMappingRequest extends MasterNodeOperationRequest {
private TimeValue timeout = new TimeValue(10, TimeUnit.SECONDS);
private boolean ignoreDuplicates = true;
private boolean ignoreConflicts = true;
PutMappingRequest() {
}
@@ -147,19 +147,19 @@ public class PutMappingRequest extends MasterNodeOperationRequest {
/**
* If there is already a mapping definition registered against the type, the new definition will be merged into it. If
* elements that can't be merged are detected, the request will be rejected unless
* {@link #ignoreDuplicates(boolean)} is set. In such a case, the duplicate mappings will be rejected.
* {@link #ignoreConflicts(boolean)} is set, in which case the conflicting parts of the mapping are ignored.
*/
public boolean ignoreDuplicates() {
return ignoreDuplicates;
public boolean ignoreConflicts() {
return ignoreConflicts;
}
/**
* If there is already a mapping definition registered against the type, the new definition will be merged into it. If
* elements that can't be merged are detected, the request will be rejected unless
* {@link #ignoreDuplicates(boolean)} is set. In such a case, the duplicate mappings will be rejected.
* {@link #ignoreConflicts(boolean)} is set, in which case the conflicting parts of the mapping are ignored.
*/
public PutMappingRequest ignoreDuplicates(boolean ignoreDuplicates) {
this.ignoreDuplicates = ignoreDuplicates;
public PutMappingRequest ignoreConflicts(boolean ignoreConflicts) {
this.ignoreConflicts = ignoreConflicts;
return this;
}
@@ -174,7 +174,7 @@ public class PutMappingRequest extends MasterNodeOperationRequest {
}
mappingSource = in.readUTF();
timeout = readTimeValue(in);
ignoreDuplicates = in.readBoolean();
ignoreConflicts = in.readBoolean();
}
@Override public void writeTo(DataOutput out) throws IOException {
@@ -195,6 +195,6 @@ public class PutMappingRequest extends MasterNodeOperationRequest {
}
out.writeUTF(mappingSource);
timeout.writeTo(out);
out.writeBoolean(ignoreDuplicates);
out.writeBoolean(ignoreConflicts);
}
}
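
For orientation, here is a minimal client-side sketch of the renamed request API, built only from calls that appear in this diff; the index name, type, mapping JSON, the client variable, and the listener bodies are illustrative placeholders, not part of the commit:

PutMappingRequest putMappingRequest = Requests.putMappingRequest("twitter");
putMappingRequest.type("tweet");
putMappingRequest.mappingSource("{\"tweet\":{\"properties\":{\"age\":{\"type\":\"integer\"}}}}");
// fail the request on merge conflicts instead of using the permissive default (true)
putMappingRequest.ignoreConflicts(false);
client.admin().indices().execPutMapping(putMappingRequest, new ActionListener<PutMappingResponse>() {
    @Override public void onResponse(PutMappingResponse response) {
        // response presumably exposes the acknowledged flag set by the master operation
    }
    @Override public void onFailure(Throwable t) {
        // a MergeMappingException here would indicate conflicts that were not ignored
    }
});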

View File

@@ -76,7 +76,7 @@ public class TransportPutMappingAction extends TransportMasterNodeOperationActio
@Override protected PutMappingResponse masterOperation(PutMappingRequest request) throws ElasticSearchException {
final String[] indices = processIndices(clusterService.state(), request.indices());
MetaDataService.PutMappingResult result = metaDataService.putMapping(indices, request.type(), request.mappingSource(), request.ignoreDuplicates(), request.timeout());
MetaDataService.PutMappingResult result = metaDataService.putMapping(indices, request.type(), request.mappingSource(), request.ignoreConflicts(), request.timeout());
return new PutMappingResponse(result.acknowledged());
}

View File

@@ -35,6 +35,7 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.InvalidTypeNameException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndexMissingException;
@@ -219,8 +220,8 @@ public class MetaDataService extends AbstractComponent {
if (existingMapper == null) {
existingMapper = updatedMapper;
} else {
// merge from the updated into the existing, ignore duplicates (we know we have them, we just want the new ones)
existingMapper.merge(updatedMapper, mergeFlags().simulate(false).ignoreDuplicates(true));
// merge the updated mapping into the existing one, ignoring conflicts (we know we have them, we just want the new additions)
existingMapper.merge(updatedMapper, mergeFlags().simulate(false));
}
// build the updated mapping source
final String updatedMappingSource = existingMapper.buildSource();
@@ -236,7 +237,7 @@ public class MetaDataService extends AbstractComponent {
});
}
public synchronized PutMappingResult putMapping(final String[] indices, String mappingType, final String mappingSource, boolean ignoreDuplicates, TimeValue timeout) throws ElasticSearchException {
public synchronized PutMappingResult putMapping(final String[] indices, String mappingType, final String mappingSource, boolean ignoreConflicts, TimeValue timeout) throws ElasticSearchException {
ClusterState clusterState = clusterService.state();
for (String index : indices) {
IndexRoutingTable indexTable = clusterState.routingTable().indicesRouting().get(index);
@@ -255,8 +256,12 @@ public class MetaDataService extends AbstractComponent {
newMappers.put(index, newMapper);
DocumentMapper existingMapper = indexService.mapperService().documentMapper(mappingType);
if (existingMapper != null) {
// first simulate and throw an exception if something goes wrong
existingMapper.merge(newMapper, mergeFlags().simulate(true).ignoreDuplicates(ignoreDuplicates));
// first, simulate
DocumentMapper.MergeResult mergeResult = existingMapper.merge(newMapper, mergeFlags().simulate(true));
// if we have conflicts, and we are not supposed to ignore them, throw an exception
if (!ignoreConflicts && mergeResult.hasConflicts()) {
throw new MergeMappingException(mergeResult.conflicts());
}
existingMappers.put(index, newMapper);
}
} else {
@@ -282,7 +287,7 @@ public class MetaDataService extends AbstractComponent {
if (existingMappers.containsKey(entry.getKey())) {
// we have an existing mapping, do the merge here (on the master), it will automatically update the mapping source
DocumentMapper existingMapper = existingMappers.get(entry.getKey());
existingMapper.merge(newMapper, mergeFlags().simulate(false).ignoreDuplicates(ignoreDuplicates));
existingMapper.merge(newMapper, mergeFlags().simulate(false));
// use the merged mapping source
mapping = new Tuple<String, String>(existingMapper.type(), existingMapper.buildSource());
} else {
@@ -331,6 +336,9 @@ public class MetaDataService extends AbstractComponent {
return new PutMappingResult(acknowledged);
}
/**
* The result of a put mapping operation.
*/
public static class PutMappingResult {
private final boolean acknowledged;
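
Taken together, the hunks above turn putMapping into a two-phase merge. A condensed sketch of the new flow, assuming newMapper (parsed from the request's mapping source), existingMapper, and the ignoreConflicts flag are in scope as in the hunks above; index iteration, timeouts, and cluster-state publication are omitted:

// phase 1: simulate only, so nothing in the live mapping is mutated yet
DocumentMapper.MergeResult simulation = existingMapper.merge(newMapper, mergeFlags().simulate(true));
if (!ignoreConflicts && simulation.hasConflicts()) {
    // conflicts are fatal unless the request asked to ignore them
    throw new MergeMappingException(simulation.conflicts());
}
// phase 2 (on the master): apply the merge for real; conflicting field redefinitions are
// reported but skipped, new fields are added, and the merged source is rebuilt
existingMapper.merge(newMapper, mergeFlags().simulate(false));
String updatedMappingSource = existingMapper.buildSource();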

View File

@@ -43,11 +43,6 @@ public interface DocumentMapper {
*/
String buildSource() throws FailedToGenerateSourceMapperException;
/**
* Merges this document mapper with the provided document mapper.
*/
void merge(DocumentMapper mergeWith, MergeFlags mergeFlags) throws MergeMappingException;
UidFieldMapper uidMapper();
IdFieldMapper idMapper();
@@ -93,11 +88,43 @@ public interface DocumentMapper {
*/
ParsedDocument parse(byte[] source) throws MapperParsingException;
/**
* Merges this document mapper with the provided document mapper. If there are conflicts, the
* {@link MergeResult} will hold them.
*/
MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) throws MergeMappingException;
/**
* Adds a field mapper listener.
*/
void addFieldMapperListener(FieldMapperListener fieldMapperListener, boolean includeExisting);
/**
* A result of a merge.
*/
public static class MergeResult {
private final String[] conflicts;
public MergeResult(String[] conflicts) {
this.conflicts = conflicts;
}
/**
* Returns whether the merge produced any conflicts.
*/
public boolean hasConflicts() {
return conflicts.length > 0;
}
/**
* The merge conflicts.
*/
public String[] conflicts() {
return this.conflicts;
}
}
public static class MergeFlags {
public static MergeFlags mergeFlags() {
@@ -106,8 +133,6 @@ public interface DocumentMapper {
private boolean simulate = true;
private boolean ignoreDuplicates = false;
public MergeFlags() {
}
@@ -122,15 +147,6 @@ public interface DocumentMapper {
this.simulate = simulate;
return this;
}
public boolean ignoreDuplicates() {
return ignoreDuplicates;
}
public MergeFlags ignoreDuplicates(boolean ignoreDuplicates) {
this.ignoreDuplicates = ignoreDuplicates;
return this;
}
}
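
The practical effect of the interface change, as a caller-side before/after sketch (existing and updated stand for any two document mappers of the same type):

// before this commit: a conflicting merge either threw MergeMappingException or was
// silently skipped when mergeFlags().ignoreDuplicates(true) was passed
// after this commit: merge always completes and reports conflicts, and the caller decides
DocumentMapper.MergeResult result = existing.merge(updated, mergeFlags().simulate(false));
if (result.hasConflicts()) {
    for (String conflict : result.conflicts()) {
        // e.g. "Mapper [age] exists, can't merge" -- log it, fail the request, or ignore it
    }
}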
/**

View File

@@ -347,17 +347,15 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
}
}
@Override public synchronized void merge(DocumentMapper mergeWith, MergeFlags mergeFlags) throws MergeMappingException {
@Override public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
JsonDocumentMapper jsonMergeWith = (JsonDocumentMapper) mergeWith;
JsonMergeContext mergeContext = new JsonMergeContext(this, mergeFlags);
rootObjectMapper.merge(jsonMergeWith.rootObjectMapper, mergeContext);
if (mergeContext.hasFailures()) {
throw new MergeMappingException(mergeContext.buildFailures());
}
if (!mergeFlags.simulate()) {
// update the source to the merged one
mappingSource = buildSource();
}
return new MergeResult(mergeContext.buildConflicts());
}
@Override public String buildSource() throws FailedToGenerateSourceMapperException {

View File

@@ -321,10 +321,7 @@ public abstract class JsonFieldMapper<T> implements FieldMapper<T>, JsonMapper {
}
@Override public void merge(JsonMapper mergeWith, JsonMergeContext mergeContext) throws MergeMappingException {
if (mergeContext.mergeFlags().ignoreDuplicates()) {
return;
}
mergeContext.addFailure("Mapper [" + names.fullName() + "] exists, can't merge");
mergeContext.addConflict("Mapper [" + names.fullName() + "] exists, can't merge");
}
@Override public int sortType() {

View File

@@ -33,7 +33,7 @@ public class JsonMergeContext {
private final DocumentMapper.MergeFlags mergeFlags;
private final List<String> mergeFailures = Lists.newArrayList();
private final List<String> mergeConflicts = Lists.newArrayList();
public JsonMergeContext(JsonDocumentMapper documentMapper, DocumentMapper.MergeFlags mergeFlags) {
this.documentMapper = documentMapper;
@@ -48,15 +48,15 @@ public class JsonMergeContext {
return mergeFlags;
}
public void addFailure(String mergeFailure) {
mergeFailures.add(mergeFailure);
public void addConflict(String mergeConflict) {
mergeConflicts.add(mergeConflict);
}
public boolean hasFailures() {
return !mergeFailures.isEmpty();
public boolean hasConflicts() {
return !mergeConflicts.isEmpty();
}
public String[] buildFailures() {
return mergeFailures.toArray(new String[mergeFailures.size()]);
public String[] buildConflicts() {
return mergeConflicts.toArray(new String[mergeConflicts.size()]);
}
}
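
The renamed methods keep JsonMergeContext as a plain conflict accumulator: individual mappers call addConflict(...) while merging, and JsonDocumentMapper.merge(...) wraps buildConflicts() into the MergeResult handed back to the caller. A tiny round-trip sketch, where the documentMapper variable and the conflict message are placeholders:

JsonMergeContext mergeContext = new JsonMergeContext(documentMapper, mergeFlags().simulate(true));
mergeContext.addConflict("Mapper [user.age] exists, can't merge");   // as a field mapper would
DocumentMapper.MergeResult result = new DocumentMapper.MergeResult(mergeContext.buildConflicts());
assert result.hasConflicts();               // true: one conflict was recorded
assert result.conflicts().length == 1;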

View File

@@ -342,7 +342,7 @@ public class JsonObjectMapper implements JsonMapper {
@Override public void merge(JsonMapper mergeWith, JsonMergeContext mergeContext) throws MergeMappingException {
if (!(mergeWith instanceof JsonObjectMapper)) {
mergeContext.addFailure("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
mergeContext.addConflict("Can't merge a non object mapping [" + mergeWith.name() + "] with an object mapping [" + name() + "]");
return;
}
JsonObjectMapper mergeWithObject = (JsonObjectMapper) mergeWith;

View File

@@ -57,7 +57,7 @@ public class RestPutMappingAction extends BaseRestHandler {
putMappingRequest.type(request.param("type"));
putMappingRequest.mappingSource(request.contentAsString());
putMappingRequest.timeout(request.paramAsTime("timeout", timeValueSeconds(10)));
putMappingRequest.ignoreDuplicates(request.paramAsBoolean("ignoreDuplicates", putMappingRequest.ignoreDuplicates()));
putMappingRequest.ignoreConflicts(request.paramAsBoolean("ignoreConflicts", putMappingRequest.ignoreConflicts()));
client.admin().indices().execPutMapping(putMappingRequest, new ActionListener<PutMappingResponse>() {
@Override public void onResponse(PutMappingResponse response) {
try {
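
On the REST layer the rename is user-visible: the query parameter read by this handler is now ignoreConflicts rather than ignoreDuplicates, and omitting it falls back to the request's default of true (ignore conflicts). Passing ignoreConflicts=false on the put mapping URL therefore makes a conflicting update fail with a merge error instead of being applied partially; the exact endpoint path is registered elsewhere and is not shown in this diff.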

View File

@@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper.json.merge.test1;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.json.JsonDocumentMapper;
import org.elasticsearch.index.mapper.json.JsonDocumentMapperParser;
import org.testng.annotations.Test;
@@ -42,19 +42,16 @@ public class Test1MergeMapperTests {
JsonDocumentMapper stage1 = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(stage1Mapping);
String stage2Mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/merge/test1/stage2.json");
JsonDocumentMapper stage2 = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(stage2Mapping);
try {
stage1.merge(stage2, mergeFlags().simulate(true));
assert false : "can't change field from number to type";
} catch (MergeMappingException e) {
// all is well
}
// now, test with ignore duplicates
stage1.merge(stage2, mergeFlags().ignoreDuplicates(true).simulate(true));
DocumentMapper.MergeResult mergeResult = stage1.merge(stage2, mergeFlags().simulate(true));
assertThat(mergeResult.hasConflicts(), equalTo(true));
// since we are simulating, we should not have the age mapping
assertThat(stage1.mappers().smartName("age"), nullValue());
// now merge, ignore duplicates and don't simulate
stage1.merge(stage2, mergeFlags().ignoreDuplicates(true).simulate(false));
// now merge, don't simulate
mergeResult = stage1.merge(stage2, mergeFlags().simulate(false));
// there are still merge conflicts
assertThat(mergeResult.hasConflicts(), equalTo(true));
// but the age mapping has now been merged in
assertThat(stage1.mappers().smartName("age"), notNullValue());
}
}