Default to not accepting type wrapper in indexing requests
Currently it is possible to index a document as: ``` POST /myindex/mytype/1 { "foo"...} ``` or as: ``` POST /myindex/mytype/1 { "mytype": { "foo"... } } ``` This makes indexing non-deterministic and fields can be misinterpreted as type names. This change makes Elasticsearch accept only the first form by default, ie without the type wrapper. This can be changed by setting `index.mapping.allow_type_wrapper` to `true` when creating the index. Closes #4484
This commit is contained in:
parent
a1efa1f7aa
commit
b379bf5668
|
@ -33,7 +33,7 @@ The result of the above index operation is:
|
|||
|
||||
The index operation automatically creates an index if it has not been
|
||||
created before (check out the
|
||||
<<indices-create-index,create index API>> for manually
|
||||
<<indices-create-index,create index API>> for manually
|
||||
creating an index), and also automatically creates a
|
||||
dynamic type mapping for the specific type if one has not yet been
|
||||
created (check out the <<indices-put-mapping,put mapping>>
|
||||
|
@ -44,12 +44,21 @@ objects will automatically be added to the mapping definition of the
|
|||
type specified. Check out the <<mapping,mapping>>
|
||||
section for more information on mapping definitions.
|
||||
|
||||
Though explained on the <<mapping,mapping>> section,
|
||||
it's important to note that the format of the JSON document can also
|
||||
include the type (very handy when using JSON mappers), for example:
|
||||
Note that the format of the JSON document can also include the type (very handy
|
||||
when using JSON mappers) if the `index.mapping.allow_type_wrapper` setting is
|
||||
set to true, for example:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
$ curl -XPOST 'http://localhost:9200/twitter' -d '{
|
||||
"settings": {
|
||||
"index": {
|
||||
"mapping.allow_type_wrapper": true
|
||||
}
|
||||
}
|
||||
}'
|
||||
{"acknowledged":true}
|
||||
|
||||
$ curl -XPUT 'http://localhost:9200/twitter/tweet/1' -d '{
|
||||
"tweet" : {
|
||||
"user" : "kimchy",
|
||||
|
|
|
@ -239,6 +239,8 @@ public class DocumentMapper implements ToXContent {
|
|||
}
|
||||
};
|
||||
|
||||
public static final String ALLOW_TYPE_WRAPPER = "index.mapping.allow_type_wrapper";
|
||||
|
||||
private final String index;
|
||||
|
||||
private final Settings indexSettings;
|
||||
|
@ -494,18 +496,15 @@ public class DocumentMapper implements ToXContent {
|
|||
} else if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
|
||||
}
|
||||
if (type.equals(parser.currentName())) {
|
||||
// first field is the same as the type, this might be because the type is provided, and the object exists within it
|
||||
// or because there is a valid field that by chance is named as the type
|
||||
|
||||
// Note, in this case, we only handle plain value types, an object type will be analyzed as if it was the type itself
|
||||
// and other same level fields will be ignored
|
||||
token = parser.nextToken();
|
||||
// first field is the same as the type, this might be because the
|
||||
// type is provided, and the object exists within it or because
|
||||
// there is a valid field that by chance is named as the type.
|
||||
// Because of this, by default wrapping a document in a type is
|
||||
// disabled, but can be enabled by setting
|
||||
// index.mapping.allow_type_wrapper to true
|
||||
if (type.equals(parser.currentName()) && indexSettings.getAsBoolean(ALLOW_TYPE_WRAPPER, false)) {
|
||||
parser.nextToken();
|
||||
countDownTokens++;
|
||||
// commented out, allow for same type with START_OBJECT, we do our best to handle it except for the above corner case
|
||||
// if (token != XContentParser.Token.START_OBJECT) {
|
||||
// throw new MapperException("Malformed content, a field with the same name as the type must be an object with the properties/fields within it");
|
||||
// }
|
||||
}
|
||||
|
||||
for (RootMapper rootMapper : rootMappersOrdered) {
|
||||
|
|
|
@ -853,6 +853,6 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
|
|||
}
|
||||
|
||||
private String source(String id, String nameValue) {
|
||||
return "{ type1 : { \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" } }";
|
||||
return "{ \"id\" : \"" + id + "\", \"name\" : \"" + nameValue + "\" }";
|
||||
}
|
||||
}
|
||||
|
|
|
@ -231,11 +231,11 @@ public class SimpleAllMapperTests extends ElasticsearchTestCase {
|
|||
// reparse it
|
||||
DocumentMapper builtDocMapper = MapperTestUtils.newParser().parse(builtMapping);
|
||||
|
||||
byte[] json = jsonBuilder().startObject().startObject("test")
|
||||
byte[] json = jsonBuilder().startObject()
|
||||
.field("foo", "bar")
|
||||
.field("_id", 1)
|
||||
.field("foobar", "foobar")
|
||||
.endObject().endObject().bytes().array();
|
||||
.endObject().bytes().array();
|
||||
Document doc = builtDocMapper.parse(new BytesArray(json)).rootDoc();
|
||||
AllField field = (AllField) doc.getField("_all");
|
||||
if (enabled) {
|
||||
|
|
|
@ -1,20 +1,18 @@
|
|||
{
|
||||
"person":{
|
||||
"_boost":3.7,
|
||||
"_id":"1",
|
||||
"name":{
|
||||
"first":"shay",
|
||||
"last":"banon"
|
||||
"_boost":3.7,
|
||||
"_id":"1",
|
||||
"name":{
|
||||
"first":"shay",
|
||||
"last":"banon"
|
||||
},
|
||||
"address":{
|
||||
"first":{
|
||||
"location":"first location"
|
||||
},
|
||||
"address":{
|
||||
"first":{
|
||||
"location":"first location"
|
||||
},
|
||||
"last":{
|
||||
"location":"last location"
|
||||
}
|
||||
},
|
||||
"simple1":1,
|
||||
"simple2":2
|
||||
}
|
||||
}
|
||||
"last":{
|
||||
"location":"last location"
|
||||
}
|
||||
},
|
||||
"simple1":1,
|
||||
"simple2":2
|
||||
}
|
||||
|
|
|
@ -22,6 +22,8 @@ package org.elasticsearch.index.mapper.simple;
|
|||
import com.google.common.base.Charsets;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.mapper.*;
|
||||
import org.elasticsearch.index.mapper.ParseContext.Document;
|
||||
import org.elasticsearch.test.ElasticsearchTestCase;
|
||||
|
@ -128,4 +130,18 @@ public class SimpleMapperTests extends ElasticsearchTestCase {
|
|||
assertThat(e.getMessage(), equalTo("failed to parse, document is empty"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTypeWrapperWithSetting() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
|
||||
Settings settings = ImmutableSettings.settingsBuilder().put("index.mapping.allow_type_wrapper", true).build();
|
||||
DocumentMapper docMapper = MapperTestUtils.newParser(settings).parse(mapping);
|
||||
|
||||
assertThat((String) docMapper.meta().get("param1"), equalTo("value1"));
|
||||
|
||||
BytesReference json = new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/mapper/simple/test1-withtype.json"));
|
||||
Document doc = docMapper.parse(json).rootDoc();
|
||||
assertThat(doc.get(docMapper.uidMapper().names().indexName()), equalTo(Uid.createUid("person", "1")));
|
||||
assertThat(doc.get(docMapper.mappers().name("first").mapper().names().indexName()), equalTo("shay"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
{
|
||||
person:{
|
||||
_boost:3.7,
|
||||
_id:"1",
|
||||
name:{
|
||||
first:"shay",
|
||||
last:"banon"
|
||||
},
|
||||
address:{
|
||||
first:{
|
||||
location:"first location"
|
||||
},
|
||||
last:{
|
||||
location:"last location"
|
||||
}
|
||||
},
|
||||
age:32,
|
||||
birthDate:"1977-11-15",
|
||||
nerd:true,
|
||||
dogs:["buck", "mia"],
|
||||
complex:[
|
||||
{
|
||||
value1:"value1"
|
||||
},
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
],
|
||||
complex2:[
|
||||
[
|
||||
{
|
||||
value1:"value1"
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
]
|
||||
],
|
||||
nullValue:null
|
||||
}
|
||||
}
|
|
@ -1,43 +1,41 @@
|
|||
{
|
||||
person:{
|
||||
_boost:3.7,
|
||||
_id:"1",
|
||||
name:{
|
||||
first:"shay",
|
||||
last:"banon"
|
||||
_boost:3.7,
|
||||
_id:"1",
|
||||
name:{
|
||||
first:"shay",
|
||||
last:"banon"
|
||||
},
|
||||
address:{
|
||||
first:{
|
||||
location:"first location"
|
||||
},
|
||||
address:{
|
||||
first:{
|
||||
location:"first location"
|
||||
},
|
||||
last:{
|
||||
location:"last location"
|
||||
}
|
||||
last:{
|
||||
location:"last location"
|
||||
}
|
||||
},
|
||||
age:32,
|
||||
birthDate:"1977-11-15",
|
||||
nerd:true,
|
||||
dogs:["buck", "mia"],
|
||||
complex:[
|
||||
{
|
||||
value1:"value1"
|
||||
},
|
||||
age:32,
|
||||
birthDate:"1977-11-15",
|
||||
nerd:true,
|
||||
dogs:["buck", "mia"],
|
||||
complex:[
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
],
|
||||
complex2:[
|
||||
[
|
||||
{
|
||||
value1:"value1"
|
||||
},
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
],
|
||||
complex2:[
|
||||
[
|
||||
{
|
||||
value1:"value1"
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
]
|
||||
],
|
||||
nullValue:null
|
||||
}
|
||||
}
|
||||
]
|
||||
],
|
||||
nullValue:null
|
||||
}
|
||||
|
|
|
@ -26,9 +26,7 @@ import org.elasticsearch.index.mapper.ParsedDocument;
|
|||
import org.elasticsearch.test.ElasticsearchTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -68,9 +66,9 @@ public class ParseDocumentTypeLevelsTests extends ElasticsearchTestCase {
|
|||
.endObject().endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("inner.inner_field"), equalTo("inner_value"));
|
||||
assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("type.test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("type.inner.inner_field"), equalTo("inner_value"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -109,10 +107,10 @@ public class ParseDocumentTypeLevelsTests extends ElasticsearchTestCase {
|
|||
.endObject().endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("type"), equalTo("value_type"));
|
||||
assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("inner.inner_field"), equalTo("inner_value"));
|
||||
assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
|
||||
assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("type.test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("type.inner.inner_field"), equalTo("inner_value"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -131,9 +129,9 @@ public class ParseDocumentTypeLevelsTests extends ElasticsearchTestCase {
|
|||
.bytes());
|
||||
|
||||
// in this case, we analyze the type object as the actual document, and ignore the other same level fields
|
||||
assertThat(doc.rootDoc().get("type_field"), equalTo("type_value"));
|
||||
assertThat(doc.rootDoc().get("test1"), nullValue());
|
||||
assertThat(doc.rootDoc().get("test2"), nullValue());
|
||||
assertThat(doc.rootDoc().get("type.type_field"), equalTo("type_value"));
|
||||
assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -151,10 +149,10 @@ public class ParseDocumentTypeLevelsTests extends ElasticsearchTestCase {
|
|||
.endObject().endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("type.type_field"), equalTo("type_value"));
|
||||
assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("inner.inner_field"), equalTo("inner_value"));
|
||||
assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value"));
|
||||
assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("type.test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("type.inner.inner_field"), equalTo("inner_value"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -172,10 +170,10 @@ public class ParseDocumentTypeLevelsTests extends ElasticsearchTestCase {
|
|||
.endObject().endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("type"), equalTo("value_type"));
|
||||
assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("inner.inner_field"), equalTo("inner_value"));
|
||||
assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
|
||||
assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("type.test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("type.inner.inner_field"), equalTo("inner_value"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -193,10 +191,10 @@ public class ParseDocumentTypeLevelsTests extends ElasticsearchTestCase {
|
|||
.endObject().endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("type"), equalTo("value_type"));
|
||||
assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("inner.inner_field"), equalTo("inner_value"));
|
||||
assertThat(doc.rootDoc().get("type.type"), equalTo("value_type"));
|
||||
assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("type.test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("type.inner.inner_field"), equalTo("inner_value"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -236,9 +234,9 @@ public class ParseDocumentTypeLevelsTests extends ElasticsearchTestCase {
|
|||
.endObject().endObject()
|
||||
.bytes());
|
||||
|
||||
assertThat(doc.rootDoc().get("type.type_field"), equalTo("type_value"));
|
||||
assertThat(doc.rootDoc().get("test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("inner.inner_field"), equalTo("inner_value"));
|
||||
assertThat(doc.rootDoc().get("type.type.type_field"), equalTo("type_value"));
|
||||
assertThat(doc.rootDoc().get("type.test1"), equalTo("value1"));
|
||||
assertThat(doc.rootDoc().get("type.test2"), equalTo("value2"));
|
||||
assertThat(doc.rootDoc().get("type.inner.inner_field"), equalTo("inner_value"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,47 +1,45 @@
|
|||
{
|
||||
person:{
|
||||
_boost:3.7,
|
||||
_id:"1",
|
||||
name:{
|
||||
first:"shay",
|
||||
last:"banon"
|
||||
_boost:3.7,
|
||||
_id:"1",
|
||||
name:{
|
||||
first:"shay",
|
||||
last:"banon"
|
||||
},
|
||||
address:{
|
||||
first:{
|
||||
location:"first location"
|
||||
},
|
||||
address:{
|
||||
first:{
|
||||
location:"first location"
|
||||
},
|
||||
last:{
|
||||
location:"last location"
|
||||
}
|
||||
last:{
|
||||
location:"last location"
|
||||
}
|
||||
},
|
||||
age:32,
|
||||
birthDate:"1977-11-15",
|
||||
nerd:true,
|
||||
dogs:["buck", "mia"],
|
||||
complex:[
|
||||
{
|
||||
value1:"value1"
|
||||
},
|
||||
age:32,
|
||||
birthDate:"1977-11-15",
|
||||
nerd:true,
|
||||
dogs:["buck", "mia"],
|
||||
complex:[
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
],
|
||||
complex2:[
|
||||
[
|
||||
{
|
||||
value1:"value1"
|
||||
},
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
],
|
||||
complex2:[
|
||||
[
|
||||
{
|
||||
value1:"value1"
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
value2:"value2"
|
||||
}
|
||||
]
|
||||
],
|
||||
nullValue:null,
|
||||
"location":{
|
||||
"lat":1.1,
|
||||
"lon":1.2
|
||||
}
|
||||
]
|
||||
],
|
||||
nullValue:null,
|
||||
"location":{
|
||||
"lat":1.1,
|
||||
"lon":1.2
|
||||
}
|
||||
}
|
|
@ -164,11 +164,6 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
|
|||
.field("field2", "value")
|
||||
.endObject().endObject();
|
||||
|
||||
XContentBuilder docWithType = XContentFactory.jsonBuilder().startObject().startObject("doc").startObject("type1")
|
||||
.field("field1", 1)
|
||||
.field("field2", "value")
|
||||
.endObject().endObject().endObject();
|
||||
|
||||
PercolateResponse response = client().preparePercolate().setSource(doc)
|
||||
.setIndices("test").setDocumentType("type1")
|
||||
.execute().actionGet();
|
||||
|
@ -187,13 +182,6 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
|
|||
assertThat(response.getMatches(), arrayWithSize(1));
|
||||
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1"));
|
||||
|
||||
response = client().preparePercolate()
|
||||
.setIndices("test").setDocumentType("type1")
|
||||
.setSource(docWithType).execute().actionGet();
|
||||
assertMatchCount(response, 1l);
|
||||
assertThat(response.getMatches(), arrayWithSize(1));
|
||||
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("test1"));
|
||||
|
||||
// add second query...
|
||||
client().prepareIndex("test", PercolatorService.TYPE_NAME, "test2")
|
||||
.setSource(XContentFactory.jsonBuilder().startObject().field("query", termQuery("field1", 1)).endObject())
|
||||
|
@ -430,7 +418,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
|
|||
|
||||
percolate = client().preparePercolate()
|
||||
.setIndices("test").setDocumentType("type1")
|
||||
.setSource(jsonBuilder().startObject().startObject("doc").startObject("type1").field("field1", "value2").endObject().endObject().endObject())
|
||||
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject())
|
||||
.execute().actionGet();
|
||||
assertMatchCount(percolate, 1l);
|
||||
assertThat(percolate.getMatches(), arrayWithSize(1));
|
||||
|
@ -471,7 +459,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
|
|||
|
||||
percolate = client().preparePercolate()
|
||||
.setIndices("test").setDocumentType("type1")
|
||||
.setSource(jsonBuilder().startObject().startObject("doc").startObject("type1").field("field1", "value2").endObject().endObject().endObject())
|
||||
.setSource(jsonBuilder().startObject().startObject("doc").field("field1", "value2").endObject().endObject())
|
||||
.execute().actionGet();
|
||||
assertMatchCount(percolate, 1l);
|
||||
assertThat(percolate.getMatches(), arrayWithSize(1));
|
||||
|
@ -487,7 +475,7 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
|
|||
.execute().actionGet();
|
||||
|
||||
PercolateSourceBuilder sourceBuilder = new PercolateSourceBuilder()
|
||||
.setDoc(docBuilder().setDoc(jsonBuilder().startObject().startObject("type1").field("field1", "value2").endObject().endObject()))
|
||||
.setDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", "value2").endObject()))
|
||||
.setQueryBuilder(termQuery("color", "red"));
|
||||
percolate = client().preparePercolate()
|
||||
.setIndices("test").setDocumentType("type1")
|
||||
|
@ -533,9 +521,9 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
|
|||
logger.info("--> percolate a document");
|
||||
PercolateResponse percolate = client().preparePercolate().setIndices("test").setDocumentType("type1")
|
||||
.setSource(jsonBuilder().startObject()
|
||||
.startObject("doc").startObject("type1")
|
||||
.startObject("doc")
|
||||
.field("field1", "value1")
|
||||
.endObject().endObject()
|
||||
.endObject()
|
||||
.endObject())
|
||||
.execute().actionGet();
|
||||
assertMatchCount(percolate, 1l);
|
||||
|
|
Loading…
Reference in New Issue