Merge pull request #13085 from rjernst/fix/13017

Fix document parsing to properly ignore entire type when disabled
Ryan Ernst 2015-08-25 08:58:07 -07:00
commit d77bcb0d9b
2 changed files with 91 additions and 25 deletions
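For orientation (this note is not part of the original commit message): when a type's mapping sets "enabled": false at the top level, indexing a document against that type should leave none of its fields in the parsed result. A minimal sketch of such a mapping, built with the same XContentFactory.jsonBuilder() calls the new test below uses:

// Sketch only (not from this commit), mirroring the new DocumentParserTests:
// with a type-level "enabled": false mapping, a parsed document is expected
// to carry no indexed fields from its source.
String mapping = XContentFactory.jsonBuilder()
        .startObject()
            .startObject("type")
                .field("enabled", false)
            .endObject()
        .endObject().string();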

DocumentParser.java

@@ -100,33 +100,36 @@ class DocumentParser implements Closeable {
             context.reset(parser, new ParseContext.Document(), source);
 
             // will result in START_OBJECT
-            int countDownTokens = 0;
             XContentParser.Token token = parser.nextToken();
             if (token != XContentParser.Token.START_OBJECT) {
                 throw new MapperParsingException("Malformed content, must start with an object");
             }
-            boolean emptyDoc = false;
-            token = parser.nextToken();
-            if (token == XContentParser.Token.END_OBJECT) {
-                // empty doc, we can handle it...
-                emptyDoc = true;
-            } else if (token != XContentParser.Token.FIELD_NAME) {
-                throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
-            }
-
-            for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
-                metadataMapper.preParse(context);
-            }
-
-            if (!emptyDoc) {
-                Mapper update = parseObject(context, mapping.root);
-                if (update != null) {
-                    context.addDynamicMappingsUpdate(update);
-                }
-            }
-
-            for (int i = 0; i < countDownTokens; i++) {
-                parser.nextToken();
+
+            if (mapping.root.isEnabled()) {
+                boolean emptyDoc = false;
+                token = parser.nextToken();
+                if (token == XContentParser.Token.END_OBJECT) {
+                    // empty doc, we can handle it...
+                    emptyDoc = true;
+                } else if (token != XContentParser.Token.FIELD_NAME) {
+                    throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
+                }
+
+                for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
+                    metadataMapper.preParse(context);
+                }
+
+                if (emptyDoc == false) {
+                    Mapper update = parseObject(context, mapping.root);
+                    if (update != null) {
+                        context.addDynamicMappingsUpdate(update);
+                    }
+                }
+
+                for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
+                    metadataMapper.postParse(context);
+                }
+            } else {
+                // entire type is disabled
+                parser.skipChildren();
             }
 
             // try to parse the next token, this should be null if the object is ended properly
@@ -135,12 +138,11 @@ class DocumentParser implements Closeable {
                     && source.parser() == null && parser != null) {
                 // only check for end of tokens if we created the parser here
                 token = parser.nextToken();
-                assert token == null; // double check, in tests, that we didn't end parsing early
+                if (token != null) {
+                    throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
+                }
             }
-            for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
-                metadataMapper.postParse(context);
-            }
 
         } catch (Throwable e) {
             // if its already a mapper parsing exception, no need to wrap it...
             if (e instanceof MapperParsingException) {
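The new else branch relies on XContentParser.skipChildren(): with the parser still positioned on the document's opening START_OBJECT, it consumes everything up to the matching END_OBJECT, so the stricter end-of-content check added in the second hunk finds no leftover tokens. A standalone sketch of that behavior (assumptions: the 2.x-era JsonXContent.jsonXContent.createParser(String) factory and a hypothetical SkipChildrenSketch class; this is not code from the commit):

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

// Sketch of skipChildren() semantics as used by the "entire type is disabled" branch.
public class SkipChildrenSketch {
    public static void main(String[] args) throws Exception {
        XContentParser parser = JsonXContent.jsonXContent.createParser(
                "{\"field\":\"1234\",\"nested\":{\"x\":1}}");
        XContentParser.Token token = parser.nextToken();  // START_OBJECT of the document
        assert token == XContentParser.Token.START_OBJECT;
        parser.skipChildren();                            // consumes up to the matching END_OBJECT
        assert parser.nextToken() == null;                // nothing left, so no "extra data" error
    }
}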

DocumentParserTests.java

@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper;
+
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.common.xcontent.json.JsonXContentParser;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+
+// TODO: make this a real unit test
+public class DocumentParserTests extends ESSingleNodeTestCase {
+
+    public void testTypeDisabled() throws Exception {
+        DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+            .field("enabled", false).endObject().endObject().string();
+        DocumentMapper mapper = mapperParser.parse(mapping);
+
+        BytesReference bytes = XContentFactory.jsonBuilder()
+            .startObject()
+            .field("field", "1234")
+            .endObject().bytes();
+        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        assertNull(doc.rootDoc().getField("field"));
+    }
+
+    public void testFieldDisabled() throws Exception {
+        DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+            .startObject("foo").field("enabled", false).endObject()
+            .startObject("bar").field("type", "integer").endObject()
+            .endObject().endObject().endObject().string();
+        DocumentMapper mapper = mapperParser.parse(mapping);
+
+        BytesReference bytes = XContentFactory.jsonBuilder()
+            .startObject()
+            .field("foo", "1234")
+            .field("bar", 10)
+            .endObject().bytes();
+        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
+        assertNull(doc.rootDoc().getField("foo"));
+        assertNotNull(doc.rootDoc().getField("bar"));
+    }
+}