Revert "[mappings] update dynamic fields in mapping on master even if parsing fails for the rest of doc"

This reverts commit d9a1540948.
Britta Weber 2015-03-06 19:23:41 +01:00
parent 3abf11611b
commit 0ca1e31392
9 changed files with 29 additions and 91 deletions
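
The reverted commit threaded a mappingsModified flag through MapperParsingException so that callers such as IndexShard could tell whether dynamic mapping updates had already been applied by the time parsing failed; this revert strips that plumbing out again. The same mechanical change repeats through the hunks below. For orientation, the repeated pattern (both throw statements are taken verbatim from the diff):

    // removed by the revert:
    //   throw new MapperParsingException("failed to parse", e, context.mappingsModified());
    // restored by the revert:
    throw new MapperParsingException("failed to parse", e);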

@@ -437,7 +437,7 @@ public class DocumentMapper implements ToXContent {
         ParseContext.InternalParseContext context = cache.get();
         if (source.type() != null && !source.type().equals(this.type)) {
-            throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]", context.mappingsModified());
+            throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]");
         }
         source.type(this.type);
@@ -455,7 +455,7 @@ public class DocumentMapper implements ToXContent {
             int countDownTokens = 0;
             XContentParser.Token token = parser.nextToken();
             if (token != XContentParser.Token.START_OBJECT) {
-                throw new MapperParsingException("Malformed content, must start with an object", context.mappingsModified());
+                throw new MapperParsingException("Malformed content, must start with an object");
             }
             boolean emptyDoc = false;
             token = parser.nextToken();
@@ -463,7 +463,7 @@ public class DocumentMapper implements ToXContent {
                 // empty doc, we can handle it...
                 emptyDoc = true;
             } else if (token != XContentParser.Token.FIELD_NAME) {
-                throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist", context.mappingsModified());
+                throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
             }
             for (RootMapper rootMapper : rootMappersOrdered) {
@@ -489,10 +489,10 @@ public class DocumentMapper implements ToXContent {
             // Throw a more meaningful message if the document is empty.
             if (source.source() != null && source.source().length() == 0) {
-                throw new MapperParsingException("failed to parse, document is empty", context.mappingsModified());
+                throw new MapperParsingException("failed to parse, document is empty");
             }
-            throw new MapperParsingException("failed to parse", e, context.mappingsModified());
+            throw new MapperParsingException("failed to parse", e);
         } finally {
             // only close the parser when its not provided externally
             if (source.parser() == null && parser != null) {

@@ -28,30 +28,13 @@ public class MapperParsingException extends MapperException {
     public MapperParsingException(String message) {
         super(message);
-        mappingsModified = false;
     }
-    public boolean isMappingsModified() {
-        return mappingsModified;
-    }
-    private boolean mappingsModified = false;
-    public MapperParsingException(String message, boolean mappingsModified) {
-        super(message);
-        this.mappingsModified = mappingsModified;
-    }
-    public MapperParsingException(String message, Throwable cause, boolean mappingsModified) {
-        super(message, cause);
-        this.mappingsModified = mappingsModified;
-    }
     public MapperParsingException(String message, Throwable cause) {
         super(message, cause);
-        this.mappingsModified = false;
     }
     @Override
     public RestStatus status() {
         return RestStatus.BAD_REQUEST;
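
Taken together, the surviving context lines leave the exception with just the two plain constructors and the fixed BAD_REQUEST status. A rough sketch of the post-revert class as reconstructed from this hunk (the package declaration and import are assumptions, not part of the diff):

    // Approximate post-revert shape of MapperParsingException, inferred from the
    // context lines above; package declaration and import are assumed.
    package org.elasticsearch.index.mapper;

    import org.elasticsearch.rest.RestStatus;

    public class MapperParsingException extends MapperException {

        public MapperParsingException(String message) {
            super(message);
        }

        public MapperParsingException(String message, Throwable cause) {
            super(message, cause);
        }

        @Override
        public RestStatus status() {
            return RestStatus.BAD_REQUEST;
        }
    }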

@@ -24,8 +24,8 @@ import org.elasticsearch.rest.RestStatus;
  */
 public class StrictDynamicMappingException extends MapperParsingException {
-    public StrictDynamicMappingException(String path, String fieldName, boolean mappingsModified) {
-        super("mapping set to strict, dynamic introduction of [" + fieldName + "] within [" + path + "] is not allowed", mappingsModified);
+    public StrictDynamicMappingException(String path, String fieldName) {
+        super("mapping set to strict, dynamic introduction of [" + fieldName + "] within [" + path + "] is not allowed");
     }
     @Override

@@ -429,7 +429,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
                 }
             }
         } catch (Exception e) {
-            throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e, context.mappingsModified());
+            throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
         }
         multiFields.parse(this, context);
         if (copyTo != null) {
@@ -1077,7 +1077,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
             ObjectMapper mapper = context.docMapper().objectMappers().get(objectPath);
             if (mapper == null) {
                 //TODO: Create an object dynamically?
-                throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]", context.mappingsModified());
+                throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]");
             }
             context.path().add(objectPath);

@@ -307,7 +307,7 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
     @Override
     public void postParse(ParseContext context) throws IOException {
         if (context.id() == null && !context.sourceToParse().flyweight()) {
-            throw new MapperParsingException("No id found while parsing the content source", context.mappingsModified());
+            throw new MapperParsingException("No id found while parsing the content source");
         }
         // it either get built in the preParse phase, or get parsed...
     }
@@ -329,7 +329,7 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
             // we are in the parse Phase
             String id = parser.text();
             if (context.id() != null && !context.id().equals(id)) {
-                throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]", context.mappingsModified());
+                throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]");
             }
             context.id(id);
         } // else we are in the pre/post parse phase

@@ -499,7 +499,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
         if (token.isValue() && !allowValue()) {
             // if we are parsing an object but it is just a value, its only allowed on root level parsers with there
             // is a field name with the same name as the type
-            throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but found a concrete value", context.mappingsModified());
+            throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but found a concrete value");
         }
         if (nested.isNested()) {
@@ -543,7 +543,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
             } else if (token == XContentParser.Token.VALUE_NULL) {
                 serializeNullValue(context, currentFieldName);
             } else if (token == null) {
-                throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?", context.mappingsModified());
+                throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?");
             } else if (token.isValue()) {
                 serializeValue(context, currentFieldName, token);
             }
@@ -585,18 +585,18 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
         if (mapper != null) {
             if (mapper instanceof FieldMapper) {
                 if (!((FieldMapper) mapper).supportsNullValue()) {
-                    throw new MapperParsingException("no object mapping found for null value in [" + lastFieldName + "]", context.mappingsModified());
+                    throw new MapperParsingException("no object mapping found for null value in [" + lastFieldName + "]");
                 }
             }
             mapper.parse(context);
         } else if (dynamic == Dynamic.STRICT) {
-            throw new StrictDynamicMappingException(fullPath, lastFieldName, context.mappingsModified());
+            throw new StrictDynamicMappingException(fullPath, lastFieldName);
         }
     }
     private void serializeObject(final ParseContext context, String currentFieldName) throws IOException {
         if (currentFieldName == null) {
-            throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]", context.mappingsModified());
+            throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]");
         }
         context.path().add(currentFieldName);
@@ -609,7 +609,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
             dynamic = context.root().dynamic();
         }
         if (dynamic == Dynamic.STRICT) {
-            throw new StrictDynamicMappingException(fullPath, currentFieldName, context.mappingsModified());
+            throw new StrictDynamicMappingException(fullPath, currentFieldName);
         } else if (dynamic == Dynamic.TRUE) {
             // we sync here just so we won't add it twice. Its not the end of the world
             // to sync here since next operations will get it before
@@ -661,7 +661,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
             dynamic = context.root().dynamic();
         }
         if (dynamic == Dynamic.STRICT) {
-            throw new StrictDynamicMappingException(fullPath, arrayFieldName, context.mappingsModified());
+            throw new StrictDynamicMappingException(fullPath, arrayFieldName);
         } else if (dynamic == Dynamic.TRUE) {
             // we sync here just so we won't add it twice. Its not the end of the world
             // to sync here since next operations will get it before
@@ -741,7 +741,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
             } else if (token == XContentParser.Token.VALUE_NULL) {
                 serializeNullValue(context, lastFieldName);
             } else if (token == null) {
-                throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?", context.mappingsModified());
+                throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
             } else {
                 serializeValue(context, lastFieldName, token);
             }
@@ -750,7 +750,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
     private void serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
         if (currentFieldName == null) {
-            throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]", context.mappingsModified());
+            throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
         }
         Mapper mapper = mappers.get(currentFieldName);
         if (mapper != null) {
@@ -766,7 +766,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
             dynamic = context.root().dynamic();
         }
         if (dynamic == Dynamic.STRICT) {
-            throw new StrictDynamicMappingException(fullPath, currentFieldName, context.mappingsModified());
+            throw new StrictDynamicMappingException(fullPath, currentFieldName);
         }
         if (dynamic == Dynamic.FALSE) {
             return;

@@ -231,7 +231,7 @@ public class RootObjectMapper extends ObjectMapper {
         String mappingType = dynamicTemplate.mappingType(dynamicType);
         Mapper.TypeParser typeParser = parserContext.typeParser(mappingType);
         if (typeParser == null) {
-            throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]", context.mappingsModified());
+            throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]");
         }
         return typeParser.parse(name, dynamicTemplate.mappingForName(name, dynamicType), parserContext);
     }

@@ -434,7 +434,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
             return new Engine.Create(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates, autoGeneratedId);
         } catch (Throwable t) {
-            if (docMapper.v2() || (t instanceof MapperParsingException && ((MapperParsingException)t).isMappingsModified())) {
+            if (docMapper.v2()) {
                 throw new WriteFailureException(t, docMapper.v1().type());
             } else {
                 throw t;
@@ -467,7 +467,7 @@ public class IndexShard extends AbstractIndexShardComponent {
             ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
             return new Engine.Index(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, state != IndexShardState.STARTED || canHaveDuplicates);
         } catch (Throwable t) {
-            if (docMapper.v2() || (t instanceof MapperParsingException && ((MapperParsingException)t).isMappingsModified())) {
+            if (docMapper.v2()) {
                 throw new WriteFailureException(t, docMapper.v1().type());
             } else {
                 throw t;
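
With isMappingsModified() gone, the shard can no longer tell that a failed parse already introduced dynamic mapping updates; it only wraps the failure when the whole type mapping was freshly auto-created. A minimal sketch of the post-revert catch path, lifted from the hunk above (the comment interpreting docMapper.v2() is an assumption about the surrounding code, not something this diff states):

    try {
        ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
        // ... build and return the Engine.Create / Engine.Index operation as above ...
    } catch (Throwable t) {
        if (docMapper.v2()) {
            // assumed: v2() flags that the type mapping was dynamically created for
            // this request, so the failure is reported as a write failure
            throw new WriteFailureException(t, docMapper.v1().type());
        } else {
            throw t;
        }
    }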

@@ -20,18 +20,18 @@ package org.elasticsearch.index.mapper.dynamic;
 import com.google.common.base.Predicate;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.common.settings.ImmutableSettings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.FieldMappers;
+import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.StrictDynamicMappingException;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.test.ElasticsearchSingleNodeTest;
 import org.junit.Test;
 import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.Map;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
@@ -242,49 +242,4 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
         getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
         assertNotNull(getMappingsResponse.getMappings().get("test").get("type"));
     }
-    @Test
-    public void testFieldsCreatedWithPartialParsing() throws IOException, InterruptedException {
-        XContentBuilder mapping = jsonBuilder().startObject().startObject("doc")
-                .startObject("properties")
-                .startObject("z")
-                .field("type", "long")
-                .endObject()
-                .endObject()
-                .endObject().endObject();
-        IndexService indexService = createIndex("test", ImmutableSettings.EMPTY, "doc", mapping);
-        boolean create = randomBoolean();
-        if (create == false) {
-            // we want to test sometimes create and sometimes index so sometimes add the document before and sometimes not
-            client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource(jsonBuilder().startObject().field("z", 0).endObject()).get();
-        }
-        try {
-            IndexRequestBuilder indexRequest = client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource(jsonBuilder().startObject().field("a", "string").field("z", "string").endObject());
-            indexRequest.setCreate(create);
-            indexRequest.get();
-            fail();
-        } catch (MapperParsingException e) {
-            // this should fail because the field z is of type long
-        }
-        //type should be in mapping
-        GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
-        assertNotNull(getMappingsResponse.getMappings().get("test").get("doc"));
-        client().prepareIndex().setIndex("test").setType("doc").setId("1").setSource(jsonBuilder().startObject().field("a", "string").field("z", 0).endObject()).get();
-        client().admin().indices().prepareRefresh("test").get();
-        assertThat(client().prepareSearch("test").get().getHits().getTotalHits(), equalTo(1l));
-        // both fields should be in local mapper
-        DocumentMapper mapper = indexService.mapperService().documentMapper("doc");
-        assertNotNull(mapper.mappers().name("a"));
-        assertNotNull(mapper.mappers().name("z"));
-        // both fields should be in the cluster state
-        getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
-        assertNotNull(getMappingsResponse.getMappings().get("test").get("doc"));
-        Map<String, Object> mappings = getMappingsResponse.getMappings().get("test").get("doc").getSourceAsMap();
-        assertNotNull(((LinkedHashMap) mappings.get("properties")).get("a"));
-        assertNotNull(((LinkedHashMap) mappings.get("properties")).get("z"));
-    }
 }