Mappings: Fix document parsing to properly ignore entire type when disabled

Currently when an entire type is disabled, our document parser will end
parsing on the first field of the document. This blows up the recently
added check that parsing did not silently skip any tokens (i.e. that
there was no garbage left over).

This change fixes the parser to correctly skip the entire document when
the type is disabled.

closes #13017
Ryan Ernst 2015-08-24 10:23:08 -07:00
parent 339486b943
commit f7b3fe05e4
2 changed files with 91 additions and 25 deletions
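
The fix hinges on XContentParser.skipChildren(): with the parser sitting on the document's opening START_OBJECT, it consumes everything through the matching END_OBJECT, so the leftover-token check at the end of parsing finds nothing left to read. The JSON flavor of XContentParser is backed by Jackson, whose JsonParser.skipChildren() has the same contract. Below is a minimal standalone illustration using Jackson directly; it is not code from this commit, and the class name and sample document are invented for the example.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;

public class SkipChildrenDemo {
    public static void main(String[] args) throws Exception {
        String doc = "{\"field\":\"1234\",\"nested\":{\"x\":1}}";
        try (JsonParser parser = new JsonFactory().createParser(doc)) {
            JsonToken token = parser.nextToken();          // START_OBJECT
            if (token != JsonToken.START_OBJECT) {
                throw new IllegalArgumentException("Malformed content, must start with an object");
            }
            // Equivalent of the disabled-type branch: swallow the whole document body.
            parser.skipChildren();                         // now positioned on the matching END_OBJECT
            System.out.println(parser.getCurrentToken());  // END_OBJECT
            // Equivalent of the new leftover-token check: nothing may follow the document.
            System.out.println(parser.nextToken());        // null -> no extra data after parsing
        }
    }
}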

DocumentParser.java

@@ -100,11 +100,11 @@ class DocumentParser implements Closeable {
             context.reset(parser, new ParseContext.Document(), source);
             // will result in START_OBJECT
-            int countDownTokens = 0;
             XContentParser.Token token = parser.nextToken();
             if (token != XContentParser.Token.START_OBJECT) {
                 throw new MapperParsingException("Malformed content, must start with an object");
             }
 
+            if (mapping.root.isEnabled()) {
             boolean emptyDoc = false;
             token = parser.nextToken();
             if (token == XContentParser.Token.END_OBJECT) {
@@ -117,16 +117,19 @@ class DocumentParser implements Closeable {
             for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
                 metadataMapper.preParse(context);
             }
 
-            if (!emptyDoc) {
+            if (emptyDoc == false) {
                 Mapper update = parseObject(context, mapping.root);
                 if (update != null) {
                     context.addDynamicMappingsUpdate(update);
                 }
             }
-
-            for (int i = 0; i < countDownTokens; i++) {
-                parser.nextToken();
+            for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
+                metadataMapper.postParse(context);
+            }
+            } else {
+                // entire type is disabled
+                parser.skipChildren();
             }
 
             // try to parse the next token, this should be null if the object is ended properly
@@ -135,12 +138,11 @@ class DocumentParser implements Closeable {
                     && source.parser() == null && parser != null) {
                 // only check for end of tokens if we created the parser here
                 token = parser.nextToken();
-                assert token == null; // double check, in tests, that we didn't end parsing early
+                if (token != null) {
+                    throw new IllegalArgumentException("Malformed content, found extra data after parsing: " + token);
+                }
             }
-            for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
-                metadataMapper.postParse(context);
-            }
 
         } catch (Throwable e) {
             // if its already a mapper parsing exception, no need to wrap it...
             if (e instanceof MapperParsingException) {

DocumentParserTests.java (new file)

@@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
 */

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.json.JsonXContentParser;
import org.elasticsearch.test.ESSingleNodeTestCase;

// TODO: make this a real unit test
public class DocumentParserTests extends ESSingleNodeTestCase {

    public void testTypeDisabled() throws Exception {
        DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .field("enabled", false).endObject().endObject().string();
        DocumentMapper mapper = mapperParser.parse(mapping);

        BytesReference bytes = XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "1234")
            .endObject().bytes();
        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
        assertNull(doc.rootDoc().getField("field"));
    }

    public void testFieldDisabled() throws Exception {
        DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser();
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
            .startObject("foo").field("enabled", false).endObject()
            .startObject("bar").field("type", "integer").endObject()
            .endObject().endObject().endObject().string();
        DocumentMapper mapper = mapperParser.parse(mapping);

        BytesReference bytes = XContentFactory.jsonBuilder()
            .startObject()
            .field("foo", "1234")
            .field("bar", 10)
            .endObject().bytes();
        ParsedDocument doc = mapper.parse("test", "type", "1", bytes);
        assertNull(doc.rootDoc().getField("foo"));
        assertNotNull(doc.rootDoc().getField("bar"));
    }
}
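
For reference, a small sketch (not part of this commit; the class name is invented) that prints the mapping testTypeDisabled() builds, using the same era-appropriate XContentBuilder calls as the test, to make the "enabled": false setting on the type visible:

import org.elasticsearch.common.xcontent.XContentFactory;

public class PrintDisabledTypeMapping {
    public static void main(String[] args) throws Exception {
        // Same builder chain as testTypeDisabled(); XContentBuilder.string()
        // is assumed to be available, as it is in the test above.
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .field("enabled", false).endObject().endObject().string();
        System.out.println(mapping); // {"type":{"enabled":false}}
    }
}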