Convert some more mapping tests to MapperServiceTestCase (#62089)
We don't need to extend ESSingleNodeTestCase for all these tests.
parent 5bca671f57
commit 5f05eef7e3
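Every file below follows the same pattern: instead of standing up a node with createIndex() and pushing JSON mappings through the client, the tests build a MapperService directly from the test base class. A rough before/after sketch of that pattern, assembled from the first hunk below (the helper names createMapperService, mapping, source, dynamicMapping and merge are the MapperServiceTestCase helpers used throughout this diff):

    // Before: ESSingleNodeTestCase, needs a running node and index
    IndexService index = createIndex("test");
    client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
    DocumentMapper documentMapper = index.mapperService().documentMapper("type");

    // After: MapperServiceTestCase, no node required
    MapperService mapperService = createMapperService(mapping(b -> {}));
    ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("thisIsCamelCase", "value1")));
    merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate()));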
@@ -19,37 +19,22 @@

 package org.elasticsearch.index.mapper;

-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.IndexService;
-import org.elasticsearch.test.ESSingleNodeTestCase;
+public class CamelCaseFieldNameTests extends MapperServiceTestCase {

-public class CamelCaseFieldNameTests extends ESSingleNodeTestCase {
     public void testCamelCaseFieldNameStaysAsIs() throws Exception {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .endObject().endObject());
+        MapperService mapperService = createMapperService(mapping(b -> {}));

-        IndexService index = createIndex("test");
-        client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
-        DocumentMapper documentMapper = index.mapperService().documentMapper("type");
+        ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("thisIsCamelCase", "value1")));

-        ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1",
-            BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
-                .field("thisIsCamelCase", "value1")
-                .endObject()),
-            XContentType.JSON));

         assertNotNull(doc.dynamicMappingsUpdate());
-        client().admin().indices().preparePutMapping("test").setType("type")
-            .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();

-        documentMapper = index.mapperService().documentMapper("type");
+        merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate()));

+        DocumentMapper documentMapper = mapperService.documentMapper();
         assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
         assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));

-        documentMapper = index.mapperService().documentMapperParser().parse("type", documentMapper.mappingSource());
+        documentMapper = mapperService.documentMapperParser().parse("_doc", documentMapper.mappingSource());

         assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
         assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));
@@ -20,21 +20,15 @@
 package org.elasticsearch.index.mapper;

 import org.apache.lucene.index.IndexableField;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.ParseContext.Document;
-import org.elasticsearch.test.ESSingleNodeTestCase;
 import org.hamcrest.Matchers;

+import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -44,35 +38,33 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.startsWith;

-public class CopyToMapperTests extends ESSingleNodeTestCase {
+public class CopyToMapperTests extends MapperServiceTestCase {

     @SuppressWarnings("unchecked")
     public void testCopyToFieldsParsing() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")
-            .startObject("copy_test")
-                .field("type", "text")
-                .array("copy_to", "another_field", "cyclic_test")
-            .endObject()
-            .startObject("another_field")
-                .field("type", "text")
-            .endObject()
-            .startObject("cyclic_test")
-                .field("type", "text")
-                .array("copy_to", "copy_test")
-            .endObject()
-            .startObject("int_to_str_test")
-                .field("type", "integer")
-                .field("doc_values", false)
-                .array("copy_to", "another_field", "new_field")
-            .endObject()
-            .endObject().endObject().endObject());
-        IndexService index = createIndex("test");
-        client().admin().indices().preparePutMapping("test").setType("type1").setSource(mapping, XContentType.JSON).get();
-        DocumentMapper docMapper = index.mapperService().documentMapper("type1");
-        Mapper fieldMapper = docMapper.mappers().getMapper("copy_test");
+        MapperService mapperService = createMapperService(mapping(b -> {
+            b.startObject("copy_test");
+            {
+                b.field("type", "text");
+                b.array("copy_to", "another_field", "cyclic_test");
+            }
+            b.endObject();
+            b.startObject("another_field").field("type", "text").endObject();
+            b.startObject("cyclic_test");
+            {
+                b.field("type", "text");
+                b.array("copy_to", "copy_test");
+            }
+            b.endObject();
+            b.startObject("int_to_str_test");
+            {
+                b.field("type", "integer");
+                b.field("doc_values", false);
+                b.array("copy_to", "another_field", "new_field");
+            }
+            b.endObject();
+        }));
+        Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper("copy_test");

         // Check json serialization
         TextFieldMapper stringFieldMapper = (TextFieldMapper) fieldMapper;
@@ -91,13 +83,11 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
         assertThat(copyToList.get(1), equalTo("cyclic_test"));

         // Check data parsing
-        BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
-            .field("copy_test", "foo")
-            .field("cyclic_test", "bar")
-            .field("int_to_str_test", 42)
-            .endObject());
+        ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(b -> {
+            b.field("copy_test", "foo");
+            b.field("cyclic_test", "bar");
+            b.field("int_to_str_test", 42);
+        }));

-        ParsedDocument parsedDoc = docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON));
         ParseContext.Document doc = parsedDoc.rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(2));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -118,43 +108,41 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
         assertThat(doc.getFields("new_field")[0].numericValue().intValue(), equalTo(42));

         assertNotNull(parsedDoc.dynamicMappingsUpdate());
-        client().admin().indices().preparePutMapping("test").setType("type1")
-            .setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
+        merge(mapperService, dynamicMapping(parsedDoc.dynamicMappingsUpdate()));

-        docMapper = index.mapperService().documentMapper("type1");
-        fieldMapper = docMapper.mappers().getMapper("new_field");
+        fieldMapper = mapperService.documentMapper().mappers().getMapper("new_field");
         assertThat(fieldMapper.typeName(), equalTo("long"));
     }

     public void testCopyToFieldsInnerObjectParsing() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")
+        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
+            b.startObject("copy_test");
+            {
+                b.field("type", "text");
+                b.field("copy_to", "very.inner.field");
+            }
+            b.endObject();
+            b.startObject("very");
+            {
+                b.field("type", "object");
+                b.startObject("properties");
+                {
+                    b.startObject("inner").field("type", "object").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));

-            .startObject("copy_test")
-                .field("type", "text")
-                .field("copy_to", "very.inner.field")
-            .endObject()
+        ParseContext.Document doc = docMapper.parse(source(b -> {
+            b.field("copy_test", "foo");
+            b.startObject("foo");
+            {
+                b.startObject("bar").field("baz", "zoo").endObject();
+            }
+            b.endObject();
+        })).rootDoc();

-            .startObject("very")
-                .field("type", "object")
-                .startObject("properties")
-                    .startObject("inner")
-                        .field("type", "object")
-                    .endObject()
-                .endObject()
-            .endObject()

-            .endObject().endObject().endObject());

-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));

-        BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
-            .field("copy_test", "foo")
-            .startObject("foo").startObject("bar").field("baz", "zoo").endObject().endObject()
-            .endObject());

-        ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json,
-            XContentType.JSON)).rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(1));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -164,25 +152,20 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
     }

     public void testCopyToDynamicInnerObjectParsing() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
-            .startObject("properties")
-                .startObject("copy_test")
-                    .field("type", "text")
-                    .field("copy_to", "very.inner.field")
-                .endObject()
-            .endObject()
-            .endObject().endObject());
+        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
+            b.startObject("copy_test");
+            {
+                b.field("type", "text");
+                b.field("copy_to", "very.inner.field");
+            }
+            b.endObject();
+        }));

-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
+        ParseContext.Document doc = docMapper.parse(source(b -> {
+            b.field("copy_test", "foo");
+            b.field("new_field", "bar");
+        })).rootDoc();

-        BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
-            .field("copy_test", "foo")
-            .field("new_field", "bar")
-            .endObject());

-        ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json,
-            XContentType.JSON)).rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(1));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -194,33 +177,30 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
     }

     public void testCopyToDynamicInnerInnerObjectParsing() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
-            .startObject("properties")
-                .startObject("copy_test")
-                    .field("type", "text")
-                    .field("copy_to", "very.far.inner.field")
-                .endObject()
-                .startObject("very")
-                    .field("type", "object")
-                    .startObject("properties")
-                        .startObject("far")
-                            .field("type", "object")
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject().endObject());
+        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
+            b.startObject("copy_test");
+            {
+                b.field("type", "text");
+                b.field("copy_to", "very.far.inner.field");
+            }
+            b.endObject();
+            b.startObject("very");
+            {
+                b.field("type", "object");
+                b.startObject("properties");
+                {
+                    b.startObject("far").field("type", "object").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));

-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
+        ParseContext.Document doc = docMapper.parse(source(b -> {
+            b.field("copy_test", "foo");
+            b.field("new_field", "bar");
+        })).rootDoc();

-        BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
-            .field("copy_test", "foo")
-            .field("new_field", "bar")
-            .endObject());

-        ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json,
-            XContentType.JSON)).rootDoc();
         assertThat(doc.getFields("copy_test").length, equalTo(1));
         assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -232,95 +212,84 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
     }

     public void testCopyToStrictDynamicInnerObjectParsing() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
-            .field("dynamic", "strict")
-            .startObject("properties")
-                .startObject("copy_test")
-                    .field("type", "text")
-                    .field("copy_to", "very.inner.field")
-                .endObject()
-            .endObject()
-            .endObject().endObject());
+        DocumentMapper docMapper = createDocumentMapper(topMapping(b -> {
+            b.field("dynamic", "strict");
+            b.startObject("properties");
+            {
+                b.startObject("copy_test");
+                {
+                    b.field("type", "text");
+                    b.field("copy_to", "very.inner.field");
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));

-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
+        MapperParsingException e = expectThrows(MapperParsingException.class,
+            () -> docMapper.parse(source(b -> b.field("copy_test", "foo"))));

-        BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
-            .field("copy_test", "foo")
-            .endObject());
+        assertThat(e.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [_doc] is not allowed"));

-        try {
-            docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc();
-            fail();
-        } catch (MapperParsingException ex) {
-            assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [type1] is not allowed"));
-        }
     }

     public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
-            .startObject("properties")
-                .startObject("copy_test")
-                    .field("type", "text")
-                    .field("copy_to", "very.far.field")
-                .endObject()
-                .startObject("very")
-                    .field("type", "object")
-                    .startObject("properties")
-                        .startObject("far")
-                            .field("type", "object")
-                            .field("dynamic", "strict")
-                        .endObject()
-                    .endObject()
-                .endObject()

-            .endObject()
-            .endObject().endObject());
+        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
+            b.startObject("copy_test");
+            {
+                b.field("type", "text");
+                b.field("copy_to", "very.far.field");
+            }
+            b.endObject();
+            b.startObject("very");
+            {
+                b.field("type", "object");
+                b.startObject("properties");
+                {
+                    b.startObject("far");
+                    {
+                        b.field("type", "object");
+                        b.field("dynamic", "strict");
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));

-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
+        MapperParsingException e = expectThrows(MapperParsingException.class,
+            () -> docMapper.parse(source(b -> b.field("copy_test", "foo"))));

-        BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
-            .field("copy_test", "foo")
-            .endObject());
+        assertThat(e.getMessage(),
+            startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));

-        try {
-            docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc();
-            fail();
-        } catch (MapperParsingException ex) {
-            assertThat(ex.getMessage(),
-                startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
-        }
     }

     public void testCopyToFieldMerge() throws Exception {
-        String mappingBefore = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")

-            .startObject("copy_test")
-                .field("type", "text")
-                .array("copy_to", "foo", "bar")
-            .endObject()
-            .endObject().endObject().endObject());
-        String mappingAfter = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")
-            .startObject("copy_test")
-                .field("type", "text")
-                .array("copy_to", "baz", "bar")
-            .endObject()
+        MapperService mapperService = createMapperService(mapping(b -> {
+            b.startObject("copy_test");
+            {
+                b.field("type", "text");
+                b.array("copy_to", "foo", "bar");
+            }
+            b.endObject();
+        }));
+        DocumentMapper docMapperBefore = mapperService.documentMapper();

-            .endObject().endObject().endObject());

-        MapperService mapperService = createIndex("test").mapperService();
-        DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore),
-            MapperService.MergeReason.MAPPING_UPDATE);
         FieldMapper fieldMapperBefore = (FieldMapper) docMapperBefore.mappers().getMapper("copy_test");

         assertEquals(Arrays.asList("foo", "bar"), fieldMapperBefore.copyTo().copyToFields());

-        DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter),
-            MapperService.MergeReason.MAPPING_UPDATE);
+        merge(mapperService, mapping(b -> {
+            b.startObject("copy_test");
+            {
+                b.field("type", "text");
+                b.array("copy_to", "baz", "bar");
+            }
+            b.endObject();
+        }));

+        DocumentMapper docMapperAfter = mapperService.documentMapper();
         FieldMapper fieldMapperAfter = (FieldMapper) docMapperAfter.mappers().getMapper("copy_test");

         assertEquals(Arrays.asList("baz", "bar"), fieldMapperAfter.copyTo().copyToFields());
@@ -328,73 +297,84 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
     }

     public void testCopyToNestedField() throws Exception {
-        IndexService indexService = createIndex("test");
-        DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
-        XContentBuilder mapping = jsonBuilder().startObject()
-            .startObject("type")
-                .startObject("properties")
-                    .startObject("target")
-                        .field("type", "long")
-                        .field("doc_values", false)
-                    .endObject()
-                    .startObject("n1")
-                        .field("type", "nested")
-                        .startObject("properties")
-                            .startObject("target")
-                                .field("type", "long")
-                                .field("doc_values", false)
-                            .endObject()
-                            .startObject("n2")
-                                .field("type", "nested")
-                                .startObject("properties")
-                                    .startObject("target")
-                                        .field("type", "long")
-                                        .field("doc_values", false)
-                                    .endObject()
-                                    .startObject("source")
-                                        .field("type", "long")
-                                        .field("doc_values", false)
-                                        .startArray("copy_to")
-                                            .value("target") // should go to the root doc
-                                            .value("n1.target") // should go to the parent doc
-                                            .value("n1.n2.target") // should go to the current doc
-                                        .endArray()
-                                    .endObject()
-                                .endObject()
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject();
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+            b.startObject("target");
+            {
+                b.field("type", "long");
+                b.field("doc_values", false);
+            }
+            b.endObject();
+            b.startObject("n1");
+            {
+                b.field("type", "nested");
+                b.startObject("properties");
+                {
+                    b.startObject("target");
+                    {
+                        b.field("type", "long");
+                        b.field("doc_values", false);
+                    }
+                    b.endObject();
+                    b.startObject("n2");
+                    {
+                        b.field("type", "nested");
+                        b.startObject("properties");
+                        {
+                            b.startObject("target");
+                            {
+                                b.field("type", "long");
+                                b.field("doc_values", false);
+                            }
+                            b.endObject();
+                            b.startObject("source");
+                            {
+                                b.field("type", "long");
+                                b.field("doc_values", false);
+                                b.startArray("copy_to");
+                                {
+                                    b.value("target"); // should go to the root doc
+                                    b.value("n1.target"); // should go to the parent doc
+                                    b.value("n1.n2.target"); // should go to the current doc
+                                }
+                                b.endArray();
+                            }
+                            b.endObject();
+                        }
+                        b.endObject();
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));

-        DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
+        ParsedDocument doc = mapper.parse(source(b -> {
+            b.startArray("n1");
+            {
+                b.startObject();
+                {
+                    b.startArray("n2");
+                    {
+                        b.startObject().field("source", 3).endObject();
+                        b.startObject().field("source", 5).endObject();
+                    }
+                    b.endArray();
+                }
+                b.endObject();
+                b.startObject();
+                {
+                    b.startArray("n2");
+                    {
+                        b.startObject().field("source", 7).endObject();
+                    }
+                    b.endArray();
+                }
+                b.endObject();
+            }
+            b.endArray();
+        }));

-        XContentBuilder jsonDoc = XContentFactory.jsonBuilder()
-            .startObject()
-                .startArray("n1")
-                    .startObject()
-                        .startArray("n2")
-                            .startObject()
-                                .field("source", 3)
-                            .endObject()
-                            .startObject()
-                                .field("source", 5)
-                            .endObject()
-                        .endArray()
-                    .endObject()
-                    .startObject()
-                        .startArray("n2")
-                            .startObject()
-                                .field("source", 7)
-                            .endObject()
-                        .endArray()
-                    .endObject()
-                .endArray()
-            .endObject();

-        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1",
-            BytesReference.bytes(jsonDoc), XContentType.JSON));
         assertEquals(6, doc.docs().size());

         Document nested = doc.docs().get(0);
@@ -428,145 +408,139 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
         assertFieldValue(root, "n1.n2.target");
     }

-    public void testCopyToChildNested() throws Exception {
-        IndexService indexService = createIndex("test");
-        XContentBuilder rootToNestedMapping = jsonBuilder().startObject()
-            .startObject("_doc")
-                .startObject("properties")
-                    .startObject("source")
-                        .field("type", "long")
-                        .field("copy_to", "n1.target")
-                    .endObject()
-                    .startObject("n1")
-                        .field("type", "nested")
-                        .startObject("properties")
-                            .startObject("target")
-                                .field("type", "long")
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject();
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)),
-                MergeReason.MAPPING_UPDATE));
+    public void testCopyToChildNested() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
+            b.startObject("source");
+            {
+                b.field("type", "long");
+                b.field("copy_to", "n1.target");
+            }
+            b.endObject();
+            b.startObject("n1");
+            {
+                b.field("type", "nested");
+                b.startObject("properties");
+                {
+                    b.startObject("target").field("type", "long").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        })));
         assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings"));

-        XContentBuilder nestedToNestedMapping = jsonBuilder().startObject()
-            .startObject("_doc")
-                .startObject("properties")
-                    .startObject("n1")
-                        .field("type", "nested")
-                        .startObject("properties")
-                            .startObject("source")
-                                .field("type", "long")
-                                .field("copy_to", "n1.n2.target")
-                            .endObject()
-                            .startObject("n2")
-                                .field("type", "nested")
-                                .startObject("properties")
-                                    .startObject("target")
-                                        .field("type", "long")
-                                    .endObject()
-                                .endObject()
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject();
-        e = expectThrows(IllegalArgumentException.class,
-            () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(nestedToNestedMapping)),
-                MergeReason.MAPPING_UPDATE));
+        expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
+            b.startObject("n1");
+            {
+                b.field("type", "nested");
+                b.startObject("properties");
+                {
+                    b.startObject("source");
+                    {
+                        b.field("type", "long");
+                        b.field("copy_to", "n1.n2.target");
+                    }
+                    b.endObject();
+                    b.startObject("n2");
+                    {
+                        b.field("type", "nested");
+                        b.startObject("properties");
+                        {
+                            b.startObject("target").field("type", "long").endObject();
+                        }
+                        b.endObject();
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        })));
     }

-    public void testCopyToSiblingNested() throws Exception {
-        IndexService indexService = createIndex("test");
-        XContentBuilder rootToNestedMapping = jsonBuilder().startObject()
-            .startObject("_doc")
-                .startObject("properties")
-                    .startObject("n1")
-                        .field("type", "nested")
-                        .startObject("properties")
-                            .startObject("source")
-                                .field("type", "long")
-                                .field("copy_to", "n2.target")
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                    .startObject("n2")
-                        .field("type", "nested")
-                        .startObject("properties")
-                            .startObject("target")
-                                .field("type", "long")
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject();
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)),
-                MergeReason.MAPPING_UPDATE));
+    public void testCopyToSiblingNested() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
+            b.startObject("n1");
+            {
+                b.field("type", "nested");
+                b.startObject("properties");
+                {
+                    b.startObject("source");
+                    {
+                        b.field("type", "long");
+                        b.field("copy_to", "n2.target");
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+            b.startObject("n2");
+            {
+                b.field("type", "nested");
+                b.startObject("properties");
+                {
+                    b.startObject("target").field("type", "long").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        })));
         assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings"));
     }

-    public void testCopyToObject() throws Exception {
-        IndexService indexService = createIndex("test");
-        XContentBuilder rootToNestedMapping = jsonBuilder().startObject()
-            .startObject("_doc")
-                .startObject("properties")
-                    .startObject("source")
-                        .field("type", "long")
-                        .field("copy_to", "target")
-                    .endObject()
-                    .startObject("target")
-                        .field("type", "object")
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject();
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)),
-                MergeReason.MAPPING_UPDATE));
+    public void testCopyToObject() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
+            b.startObject("source");
+            {
+                b.field("type", "long");
+                b.field("copy_to", "target");
+            }
+            b.endObject();
+            b.startObject("target");
+            {
+                b.field("type", "object");
+            }
+            b.endObject();
+        })));
         assertThat(e.getMessage(), Matchers.startsWith("Cannot copy to field [target] since it is mapped as an object"));
     }

     public void testCopyToDynamicNestedObjectParsing() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
-            .startArray("dynamic_templates")
-                .startObject()
-                    .startObject("objects")
-                        .field("match_mapping_type", "object")
-                        .startObject("mapping")
-                            .field("type", "nested")
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endArray()
-            .startObject("properties")
-                .startObject("copy_test")
-                    .field("type", "text")
-                    .field("copy_to", "very.inner.field")
-                .endObject()
-            .endObject()
-            .endObject().endObject());
+        DocumentMapper docMapper = createDocumentMapper(topMapping(b -> {
+            b.startArray("dynamic_templates");
+            {
+                b.startObject();
+                {
+                    b.startObject("objects");
+                    {
+                        b.field("match_mapping_type", "object");
+                        b.startObject("mapping").field("type", "nested").endObject();
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endArray();
+            b.startObject("properties");
+            {
+                b.startObject("copy_test");
+                {
+                    b.field("type", "text");
+                    b.field("copy_to", "very.inner.field");
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));

-        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
+        MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source(b -> {
+            b.field("copy_test", "foo");
+            b.field("new_field", "bar");
+        })));

-        BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
-            .field("copy_test", "foo")
-            .field("new_field", "bar")
-            .endObject());
+        assertThat(e.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`"));

-        try {
-            docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc();
-            fail();
-        } catch (MapperParsingException ex) {
-            assertThat(ex.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`"));
-        }
     }

     private void assertFieldValue(Document doc, String field, Number... expected) {
@@ -581,95 +555,78 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
         assertArrayEquals(expected, actual);
     }

-    public void testCopyToMultiField() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
-            .startObject("properties")
-                .startObject("my_field")
-                    .field("type", "keyword")
-                    .field("copy_to", "my_field.bar")
-                    .startObject("fields")
-                        .startObject("bar")
-                            .field("type", "text")
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject().endObject());
+    public void testCopyToMultiField() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
+            b.startObject("my_field");
+            {
+                b.field("type", "keyword");
+                b.field("copy_to", "my_field.bar");
+                b.startObject("fields");
+                {
+                    b.startObject("bar").field("type", "text").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        })));

-        MapperService mapperService = createIndex("test").mapperService();
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE));
         assertEquals("[copy_to] may not be used to copy to a multi-field: [my_field.bar]", e.getMessage());
     }

-    public void testNestedCopyTo() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
-            .startObject("properties")
-                .startObject("n")
-                    .field("type", "nested")
-                    .startObject("properties")
-                        .startObject("foo")
-                            .field("type", "keyword")
-                            .field("copy_to", "n.bar")
-                        .endObject()
-                        .startObject("bar")
-                            .field("type", "text")
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject().endObject());
+    public void testNestedCopyTo() throws IOException {
+        createDocumentMapper(fieldMapping(b -> {
+            b.field("type", "nested");
+            b.startObject("properties");
+            {
+                b.startObject("foo");
+                {
+                    b.field("type", "keyword");
+                    b.field("copy_to", "n.bar");
+                }
+                b.endObject();
+                b.startObject("bar").field("type", "text").endObject();
+            }
+            b.endObject();
+        }));

-        MapperService mapperService = createIndex("test").mapperService();
-        mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); // no exception
     }

-    public void testNestedCopyToMultiField() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
-            .startObject("properties")
-                .startObject("n")
-                    .field("type", "nested")
-                    .startObject("properties")
-                        .startObject("my_field")
-                            .field("type", "keyword")
-                            .field("copy_to", "n.my_field.bar")
-                            .startObject("fields")
-                                .startObject("bar")
-                                    .field("type", "text")
-                                .endObject()
-                            .endObject()
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject().endObject());
-        MapperService mapperService = createIndex("test").mapperService();
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE));
-        assertEquals("[copy_to] may not be used to copy to a multi-field: [n.my_field.bar]", e.getMessage());
+    public void testNestedCopyToMultiField() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createMapperService(fieldMapping(b -> {
+            b.field("type", "nested");
+            b.startObject("properties");
+            {
+                b.startObject("my_field");
+                {
+                    b.field("type", "keyword");
+                    b.field("copy_to", "field.my_field.bar");
+                    b.startObject("fields");
+                    {
+                        b.startObject("bar").field("type", "text").endObject();
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        })));
+        assertEquals("[copy_to] may not be used to copy to a multi-field: [field.my_field.bar]", e.getMessage());
     }

-    public void testCopyFromMultiField() throws Exception {
-        String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
-            .startObject("properties")
-                .startObject("my_field")
-                    .field("type", "keyword")
-                    .startObject("fields")
-                        .startObject("bar")
-                            .field("type", "text")
-                            .field("copy_to", "my_field.baz")
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject()
-            .endObject().endObject());
+    public void testCopyFromMultiField() {
+        MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
+            b.field("type", "keyword");
+            b.startObject("fields");
+            {
+                b.startObject("bar");
+                {
+                    b.field("type", "text");
+                    b.field("copy_to", "my_field.baz");
+                }
+                b.endObject();
+            }
+            b.endObject();
+        })));

-        MapperService mapperService = createIndex("test").mapperService();
-        MapperParsingException e = expectThrows(MapperParsingException.class,
-            () -> mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE));
         assertThat(e.getMessage(),
             Matchers.containsString("copy_to in multi fields is not allowed. Found the copy_to in field [bar] " +
                 "which is within a multi field."));
     }
 }
@@ -19,84 +19,63 @@

 package org.elasticsearch.index.mapper;

-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.IndexService;
-import org.elasticsearch.test.ESSingleNodeTestCase;
+public class DocumentMapperParserTests extends MapperServiceTestCase {

-import static org.hamcrest.Matchers.equalTo;

-public class DocumentMapperParserTests extends ESSingleNodeTestCase {
-    public void testTypeLevel() throws Exception {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .endObject().endObject());

-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
-        assertThat(mapper.type(), equalTo("type"));
-    }

     public void testFieldNameWithDots() throws Exception {
-        IndexService indexService = createIndex("test");
-        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
-            .startObject("foo.bar").field("type", "text").endObject()
-            .startObject("foo.baz").field("type", "keyword").endObject()
-            .endObject().endObject().endObject());
-        DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping));
+        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
+            b.startObject("foo.bar").field("type", "text").endObject();
+            b.startObject("foo.baz").field("type", "keyword").endObject();
+        }));
         assertNotNull(docMapper.mappers().getMapper("foo.bar"));
         assertNotNull(docMapper.mappers().getMapper("foo.baz"));
         assertNotNull(docMapper.objectMappers().get("foo"));
     }

     public void testFieldNameWithDeepDots() throws Exception {
-        IndexService indexService = createIndex("test");
-        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
-            .startObject("foo.bar").field("type", "text").endObject()
-            .startObject("foo.baz").startObject("properties")
-            .startObject("deep.field").field("type", "keyword").endObject().endObject()
-            .endObject().endObject().endObject().endObject());
-        DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping));
+        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
+            b.startObject("foo.bar").field("type", "text").endObject();
+            b.startObject("foo.baz");
+            {
+                b.startObject("properties");
+                {
+                    b.startObject("deep.field").field("type", "keyword").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));
         assertNotNull(docMapper.mappers().getMapper("foo.bar"));
         assertNotNull(docMapper.mappers().getMapper("foo.baz.deep.field"));
         assertNotNull(docMapper.objectMappers().get("foo"));
     }

-    public void testFieldNameWithDotsConflict() throws Exception {
-        IndexService indexService = createIndex("test");
-        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
-            .startObject("foo").field("type", "text").endObject()
-            .startObject("foo.baz").field("type", "keyword").endObject()
-            .endObject().endObject().endObject());
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
-            mapperParser.parse("type", new CompressedXContent(mapping)));
+    public void testFieldNameWithDotsConflict() {
+        MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(mapping(b -> {
+            b.startObject("foo").field("type", "text").endObject();
+            b.startObject("foo.baz").field("type", "keyword").endObject();
+        })));
         assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [text] to [ObjectMapper]"));
     }

-    public void testMultiFieldsWithFieldAlias() throws Exception {
-        IndexService indexService = createIndex("test");
-        DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties")
-                .startObject("field")
-                    .field("type", "text")
-                    .startObject("fields")
-                        .startObject("alias")
-                            .field("type", "alias")
-                            .field("path", "other-field")
-                        .endObject()
-                    .endObject()
-                .endObject()
-                .startObject("other-field")
-                    .field("type", "keyword")
-                .endObject()
-            .endObject()
-            .endObject().endObject());
-        MapperParsingException e = expectThrows(MapperParsingException.class, () ->
-            mapperParser.parse("type", new CompressedXContent(mapping)));
-        assertEquals("Type [alias] cannot be used in multi field", e.getMessage());
+    public void testMultiFieldsWithFieldAlias() {
+        MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(mapping(b -> {
+            b.startObject("field");
+            {
+                b.field("type", "text");
+                b.startObject("fields");
+                {
+                    b.startObject("alias");
+                    {
+                        b.field("type", "alias");
+                        b.field("path", "other-field");
+                    }
+                    b.endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+            b.startObject("other-field").field("type", "keyword").endObject();
+        })));
+        assertEquals("Failed to parse mapping [_doc]: Type [alias] cannot be used in multi field", e.getMessage());
     }
 }
@@ -19,18 +19,19 @@

 package org.elasticsearch.index.mapper;

-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AnalyzerScope;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
-import org.elasticsearch.test.ESSingleNodeTestCase;

 import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -41,24 +42,29 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;

-public class DocumentMapperTests extends ESSingleNodeTestCase {
+public class DocumentMapperTests extends MapperServiceTestCase {

     public void testAddFields() throws Exception {
-        String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
-            .startObject("name").field("type", "text").endObject()
-            .endObject().endObject().endObject());
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
-        String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
-            .startObject("name").field("type", "text").endObject()
-            .startObject("age").field("type", "integer").endObject()
-            .startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject()
-            .endObject()
-            .endObject().endObject().endObject());
-        DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
+        DocumentMapper stage1
+            = createDocumentMapper(mapping(b -> b.startObject("name").field("type", "text").endObject()));
+        DocumentMapper stage2 = createDocumentMapper(mapping(b -> {
+            b.startObject("name").field("type", "text").endObject();
+            b.startObject("age").field("type", "integer").endObject();
+            b.startObject("obj1");
+            {
+                b.startObject("properties");
+                {
+                    b.startObject("prop1").field("type", "integer").endObject();
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));

         MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
         DocumentMapper merged = stage1.merge(stage2.mapping(), reason);

         // stage1 mapping should not have been modified
         assertThat(stage1.mappers().getMapper("age"), nullValue());
         assertThat(stage1.mappers().getMapper("obj1.prop1"), nullValue());
@@ -68,14 +74,10 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
     }

     public void testMergeObjectDynamic() throws Exception {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject());
-        DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
         assertNull(mapper.root().dynamic());

-        String withDynamicMapping = Strings.toString(
-            XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject());
-        DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping));
+        DocumentMapper withDynamicMapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "false")));
         assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));

         DocumentMapper merged = mapper.merge(withDynamicMapper.mapping(), MergeReason.MAPPING_UPDATE);
@@ -83,138 +85,115 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
     }
 
     public void testMergeObjectAndNested() throws Exception {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-        String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-            .startObject("obj").field("type", "object").endObject()
-            .endObject().endObject().endObject());
-        DocumentMapper objectMapper = parser.parse("type1", new CompressedXContent(objectMapping));
-        String nestedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
-            .startObject("obj").field("type", "nested").endObject()
-            .endObject().endObject().endObject());
-        DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping));
+        DocumentMapper objectMapper
+            = createDocumentMapper(mapping(b -> b.startObject("obj").field("type", "object").endObject()));
+        DocumentMapper nestedMapper
+            = createDocumentMapper(mapping(b -> b.startObject("obj").field("type", "nested").endObject()));
         MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
 
-        try {
-            objectMapper.merge(nestedMapper.mapping(), reason);
-            fail();
-        } catch (IllegalArgumentException e) {
+        {
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> objectMapper.merge(nestedMapper.mapping(), reason));
             assertThat(e.getMessage(), containsString("cannot change object mapping from non-nested to nested"));
         }
-
-        try {
-            nestedMapper.merge(objectMapper.mapping(), reason);
-            fail();
-        } catch (IllegalArgumentException e) {
+        {
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> nestedMapper.merge(objectMapper.mapping(), reason));
             assertThat(e.getMessage(), containsString("cannot change object mapping from nested to non-nested"));
         }
     }
 
+    @Override
+    protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
+        Map<String, NamedAnalyzer> analyzers = new HashMap<>();
+        analyzers.put("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer()));
+        analyzers.put("keyword", new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer()));
+        analyzers.put("whitespace", new NamedAnalyzer("whitespace", AnalyzerScope.INDEX, new WhitespaceAnalyzer()));
+        return new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap());
+    }
+
     public void testMergeSearchAnalyzer() throws Exception {
-        XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("field")
-                .field("type", "text")
-                .field("analyzer", "standard")
-                .field("search_analyzer", "whitespace")
-            .endObject().endObject()
-            .endObject().endObject();
-        MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService();
+        MapperService mapperService = createMapperService(fieldMapping(b -> {
+            b.field("type", "text");
+            b.field("analyzer", "default");
+            b.field("search_analyzer", "whitespace");
+        }));
 
-        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("whitespace"));
+        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
+            equalTo("whitespace"));
 
-        String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("field")
-                .field("type", "text")
-                .field("analyzer", "standard")
-                .field("search_analyzer", "keyword")
-            .endObject().endObject()
-            .endObject().endObject());
-
-        mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
-        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("keyword"));
+        merge(mapperService, fieldMapping(b -> {
+            b.field("type", "text");
+            b.field("analyzer", "default");
+            b.field("search_analyzer", "keyword");
+        }));
+        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
+            equalTo("keyword"));
     }
 
     public void testChangeSearchAnalyzerToDefault() throws Exception {
-        XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("field")
-                .field("type", "text")
-                .field("analyzer", "standard")
-                .field("search_analyzer", "whitespace")
-            .endObject().endObject()
-            .endObject().endObject();
-        MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService();
-
-        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("whitespace"));
+        MapperService mapperService = createMapperService(fieldMapping(b -> {
+            b.field("type", "text");
+            b.field("analyzer", "default");
+            b.field("search_analyzer", "whitespace");
+        }));
 
-        String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("field")
-                .field("type", "text")
-                .field("analyzer", "standard")
-            .endObject().endObject()
-            .endObject().endObject());
+        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
+            equalTo("whitespace"));
 
-        mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
-        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("standard"));
+        merge(mapperService, fieldMapping(b -> {
+            b.field("type", "text");
+            b.field("analyzer", "default");
+        }));
+
+        assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
+            equalTo("default"));
     }
 
     public void testConcurrentMergeTest() throws Throwable {
-        final MapperService mapperService = createIndex("test").mapperService();
-        MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
-        mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), reason);
-        final DocumentMapper documentMapper = mapperService.documentMapper("test");
-
-        MappingLookup dfm = documentMapper.mappers();
-        try {
-            assertNotNull(dfm.indexAnalyzer().tokenStream("non_existing_field", "foo"));
-            fail();
-        } catch (IllegalArgumentException e) {
-            // ok that's expected
-        }
+        final MapperService mapperService = createMapperService(mapping(b -> {}));
+        final DocumentMapper documentMapper = mapperService.documentMapper();
+
+        expectThrows(IllegalArgumentException.class,
+            () -> documentMapper.mappers().indexAnalyzer().tokenStream("non_existing_field", "foo"));
 
         final AtomicBoolean stopped = new AtomicBoolean(false);
         final CyclicBarrier barrier = new CyclicBarrier(2);
         final AtomicReference<String> lastIntroducedFieldName = new AtomicReference<>();
         final AtomicReference<Exception> error = new AtomicReference<>();
-        final Thread updater = new Thread() {
-            @Override
-            public void run() {
-                try {
-                    barrier.await();
-                    for (int i = 0; i < 200 && stopped.get() == false; i++) {
-                        final String fieldName = Integer.toString(i);
-                        ParsedDocument doc = documentMapper.parse(new SourceToParse("test",
-                            "test",
-                            fieldName,
-                            new BytesArray("{ \"" + fieldName + "\" : \"test\" }"),
-                            XContentType.JSON));
-                        Mapping update = doc.dynamicMappingsUpdate();
-                        assert update != null;
-                        lastIntroducedFieldName.set(fieldName);
-                        mapperService.merge("test", new CompressedXContent(update.toString()), MergeReason.MAPPING_UPDATE);
-                    }
-                } catch (Exception e) {
-                    error.set(e);
-                } finally {
-                    stopped.set(true);
-                }
-            }
-        };
+        final Thread updater = new Thread(() -> {
+            try {
+                barrier.await();
+                for (int i = 0; i < 200 && stopped.get() == false; i++) {
+                    final String fieldName = Integer.toString(i);
+                    ParsedDocument doc = documentMapper.parse(source(b -> b.field(fieldName, "test")));
+                    Mapping update = doc.dynamicMappingsUpdate();
+                    assert update != null;
+                    lastIntroducedFieldName.set(fieldName);
+                    mapperService.merge("_doc", new CompressedXContent(update.toString()), MergeReason.MAPPING_UPDATE);
+                }
+            } catch (Exception e) {
+                error.set(e);
+            } finally {
+                stopped.set(true);
+            }
+        });
         updater.start();
         try {
             barrier.await();
             while(stopped.get() == false) {
                 final String fieldName = lastIntroducedFieldName.get();
-                final BytesReference source = new BytesArray("{ \"" + fieldName + "\" : \"test\" }");
-                ParsedDocument parsedDoc = documentMapper.parse(new SourceToParse("test",
-                    "test",
-                    "random",
-                    source,
-                    XContentType.JSON));
+                if (fieldName == null) {
+                    continue;
+                }
+                ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(b -> b.field(fieldName, "test")));
                 if (parsedDoc.dynamicMappingsUpdate() != null) {
                     // not in the mapping yet, try again
                     continue;
                 }
-                dfm = documentMapper.mappers();
-                assertNotNull(dfm.indexAnalyzer().tokenStream(fieldName, "foo"));
+                assertNotNull(mapperService.indexAnalyzer().tokenStream(fieldName, "foo"));
             }
         } finally {
             stopped.set(true);
@@ -226,133 +205,82 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
     }
 
     public void testDoNotRepeatOriginalMapping() throws IOException {
-        MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
-        CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
-                .startObject("type")
-                    .startObject("_source")
-                        .field("enabled", false)
-                    .endObject()
-                .endObject().endObject()));
-        MapperService mapperService = createIndex("test").mapperService();
-        mapperService.merge("type", mapping, reason);
+        MapperService mapperService
+            = createMapperService(topMapping(b -> b.startObject("_source").field("enabled", false).endObject()));
 
-        CompressedXContent update = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
-                .startObject("type")
-                    .startObject("properties")
-                        .startObject("foo")
-                            .field("type", "text")
-                        .endObject()
-                    .endObject()
-                .endObject().endObject()));
-        DocumentMapper mapper = mapperService.merge("type", update, reason);
+        merge(mapperService, fieldMapping(b -> b.field("type", "text")));
 
-        assertNotNull(mapper.mappers().getMapper("foo"));
-        assertFalse(mapper.sourceMapper().enabled());
+        assertNotNull(mapperService.documentMapper().mappers().getMapper("field"));
+        assertFalse(mapperService.documentMapper().sourceMapper().enabled());
     }
 
     public void testMergeMetadataFieldsForIndexTemplates() throws IOException {
-        CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
-                .startObject("type")
-                    .startObject("_source")
-                        .field("enabled", false)
-                    .endObject()
-                .endObject().endObject()));
-        MapperService mapperService = createIndex("test").mapperService();
-        mapperService.merge("type", mapping, MergeReason.INDEX_TEMPLATE);
+        MapperService mapperService
+            = createMapperService(topMapping(b -> b.startObject("_source").field("enabled", false).endObject()));
 
-        CompressedXContent update = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
-                .startObject("type")
-                    .startObject("_source")
-                        .field("enabled", true)
-                    .endObject()
-                .endObject().endObject()));
-        DocumentMapper mapper = mapperService.merge("type", update, MergeReason.INDEX_TEMPLATE);
+        merge(mapperService, MergeReason.INDEX_TEMPLATE,
+            topMapping(b -> b.startObject("_source").field("enabled", true).endObject()));
+        DocumentMapper mapper = mapperService.documentMapper();
         assertTrue(mapper.sourceMapper().enabled());
     }
 
     public void testMergeMeta() throws IOException {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-
-        String initMapping = Strings
-            .toString(XContentFactory.jsonBuilder()
-                .startObject()
-                    .startObject("test")
-                        .startObject("_meta")
-                            .field("foo").value("bar")
-                        .endObject()
-                    .endObject()
-                .endObject());
-        DocumentMapper initMapper = parser.parse("test", new CompressedXContent(initMapping));
-
+        DocumentMapper initMapper
+            = createDocumentMapper(topMapping(b -> b.startObject("_meta").field("foo", "bar").endObject()));
         assertThat(initMapper.meta().get("foo"), equalTo("bar"));
 
-        String updateMapping = Strings
-            .toString(XContentFactory.jsonBuilder()
-                .startObject()
-                    .startObject("test")
-                        .startObject("properties")
-                            .startObject("name").field("type", "text").endObject()
-                        .endObject()
-                    .endObject()
-                .endObject());
-        DocumentMapper updatedMapper = parser.parse("test", new CompressedXContent(updateMapping));
+        DocumentMapper updatedMapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text")));
 
         DocumentMapper mergedMapper = initMapper.merge(updatedMapper.mapping(), MergeReason.MAPPING_UPDATE);
         assertThat(mergedMapper.meta().get("foo"), equalTo("bar"));
 
-        updateMapping = Strings
-            .toString(XContentFactory.jsonBuilder()
-                .startObject()
-                    .startObject("test")
-                        .startObject("_meta")
-                            .field("foo").value("new_bar")
-                        .endObject()
-                    .endObject()
-                .endObject());
-        updatedMapper = parser.parse("test", new CompressedXContent(updateMapping));
+        updatedMapper
+            = createDocumentMapper(topMapping(b -> b.startObject("_meta").field("foo", "new_bar").endObject()));
 
         mergedMapper = initMapper.merge(updatedMapper.mapping(), MergeReason.MAPPING_UPDATE);
         assertThat(mergedMapper.meta().get("foo"), equalTo("new_bar"));
     }
 
     public void testMergeMetaForIndexTemplate() throws IOException {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-
-        String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
-                .startObject("_meta")
-                    .field("field", "value")
-                    .startObject("object")
-                        .field("field1", "value1")
-                        .field("field2", "value2")
-                    .endObject()
-                .endObject()
-            .endObject());
-        DocumentMapper initMapper = parser.parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(initMapping));
+        DocumentMapper initMapper = createDocumentMapper(topMapping(b -> {
+            b.startObject("_meta");
+            {
+                b.field("field", "value");
+                b.startObject("object");
+                {
+                    b.field("field1", "value1");
+                    b.field("field2", "value2");
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));
 
         Map<String, Object> expected = org.elasticsearch.common.collect.Map.of(
             "field", "value",
             "object", org.elasticsearch.common.collect.Map.of("field1", "value1", "field2", "value2"));
         assertThat(initMapper.meta(), equalTo(expected));
 
-        String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
-                .startObject("properties")
-                    .startObject("name").field("type", "text").endObject()
-                .endObject()
-            .endObject());
-        DocumentMapper updatedMapper = parser.parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(updateMapping));
+        DocumentMapper updatedMapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text")));
         DocumentMapper mergedMapper = initMapper.merge(updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE);
         assertThat(mergedMapper.meta(), equalTo(expected));
 
-        updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
-                .startObject("_meta")
-                    .field("field", "value")
-                    .startObject("object")
-                        .field("field2", "new_value")
-                        .field("field3", "value3")
-                    .endObject()
-                .endObject()
-            .endObject());
-        updatedMapper = parser.parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(updateMapping));
+        updatedMapper = createDocumentMapper(topMapping(b -> {
+            b.startObject("_meta");
+            {
+                b.field("field", "value");
+                b.startObject("object");
+                {
+                    b.field("field2", "new_value");
+                    b.field("field3", "value3");
+                }
+                b.endObject();
+            }
+            b.endObject();
+        }));
         mergedMapper = mergedMapper.merge(updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE);
 
         expected = org.elasticsearch.common.collect.Map.of(
File diff suppressed because it is too large
@@ -18,80 +18,55 @@
  */
 package org.elasticsearch.index.mapper;
 
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.store.Directory;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Mockito.mock;
 
-public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
+public class DoubleIndexingDocTests extends MapperServiceTestCase {
     public void testDoubleIndexingSameDoc() throws Exception {
-        Directory dir = newDirectory();
-        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), Lucene.STANDARD_ANALYZER));
-
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").endObject()
-                .endObject().endObject());
-        IndexService index = createIndex("test");
-        client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
-        MapperService mapperService = index.mapperService();
-        DocumentMapper mapper = mapperService.documentMapper();
-
-        QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L, null);
+        MapperService mapperService = createMapperService(mapping(b -> {}));
 
-        ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
-                .bytes(XContentFactory.jsonBuilder()
-                        .startObject()
-                        .field("field1", "value1")
-                        .field("field2", 1)
-                        .field("field3", 1.1)
-                        .field("field4", "2010-01-01")
-                        .startArray("field5").value(1).value(2).value(3).endArray()
-                        .endObject()),
-            XContentType.JSON));
+        ParsedDocument doc = mapperService.documentMapper().parse(source(b -> {
+            b.field("field1", "value1");
+            b.field("field2", 1);
+            b.field("field3", 1.1);
+            b.field("field4", "2010-01-01");
+            b.startArray("field5").value(1).value(2).value(3).endArray();
+        }));
         assertNotNull(doc.dynamicMappingsUpdate());
-        client().admin().indices().preparePutMapping("test").setType("type")
-            .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
+        merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate()));
 
-        writer.addDocument(doc.rootDoc());
-        writer.addDocument(doc.rootDoc());
+        QueryShardContext qsc = mock(QueryShardContext.class);
 
-        IndexReader reader = DirectoryReader.open(writer);
-        IndexSearcher searcher = new IndexSearcher(reader);
+        withLuceneIndex(mapperService, iw -> {
+            iw.addDocument(doc.rootDoc());
+            iw.addDocument(doc.rootDoc());
+        }, reader -> {
+            IndexSearcher searcher = new IndexSearcher(reader);
+            TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", qsc), 10);
+            assertThat(topDocs.totalHits.value, equalTo(2L));
 
-        TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10);
-        assertThat(topDocs.totalHits.value, equalTo(2L));
+            topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", qsc), 10);
+            assertThat(topDocs.totalHits.value, equalTo(2L));
 
-        topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10);
-        assertThat(topDocs.totalHits.value, equalTo(2L));
+            topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", qsc), 10);
+            assertThat(topDocs.totalHits.value, equalTo(2L));
 
-        topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10);
-        assertThat(topDocs.totalHits.value, equalTo(2L));
+            topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", qsc), 10);
+            assertThat(topDocs.totalHits.value, equalTo(2L));
 
-        topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10);
-        assertThat(topDocs.totalHits.value, equalTo(2L));
+            topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", qsc), 10);
+            assertThat(topDocs.totalHits.value, equalTo(2L));
 
-        topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10);
-        assertThat(topDocs.totalHits.value, equalTo(2L));
+            topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", qsc), 10);
+            assertThat(topDocs.totalHits.value, equalTo(2L));
 
-        topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10);
-        assertThat(topDocs.totalHits.value, equalTo(2L));
-
-        topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10);
-        assertThat(topDocs.totalHits.value, equalTo(2L));
-        writer.close();
-        reader.close();
-        dir.close();
+            topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", qsc), 10);
+            assertThat(topDocs.totalHits.value, equalTo(2L));
+        });
     }
 }
@@ -94,6 +94,12 @@ public abstract class MapperServiceTestCase extends ESTestCase {
         return createMapperService(mappings).documentMapper();
     }
 
+    protected final DocumentMapper createDocumentMapper(String type, String mappings) throws IOException {
+        MapperService mapperService = createMapperService(mapping(b -> {}));
+        merge(type, mapperService, mappings);
+        return mapperService.documentMapper();
+    }
+
     protected final MapperService createMapperService(XContentBuilder mappings) throws IOException {
         return createMapperService(getIndexSettings(), mappings);
     }
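For reference, this new overload lets a test build a DocumentMapper from a mapping supplied as a raw JSON string, without creating an index or talking to a node. The sketch below is illustrative only: the test class, field name and mapping JSON are invented, and only createDocumentMapper(String, String) comes from the method added above.

    package org.elasticsearch.index.mapper;

    import java.io.IOException;

    public class ExampleStringMappingTests extends MapperServiceTestCase {
        public void testParseFromJsonString() throws IOException {
            // "_doc" matches the default type name used by the other helpers in this base class
            DocumentMapper mapper = createDocumentMapper("_doc",
                "{\"_doc\":{\"properties\":{\"field\":{\"type\":\"keyword\"}}}}");
            assertNotNull(mapper.mappers().getMapper("field"));
        }
    }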
@@ -158,7 +164,29 @@ public abstract class MapperServiceTestCase extends ESTestCase {
      * Merge a new mapping into the one in the provided {@link MapperService}.
      */
     protected final void merge(MapperService mapperService, XContentBuilder mapping) throws IOException {
-        mapperService.merge("_doc", new CompressedXContent(BytesReference.bytes(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
+        merge(mapperService, MapperService.MergeReason.MAPPING_UPDATE, mapping);
+    }
+
+    /**
+     * Merge a new mapping into the one in the provided {@link MapperService}.
+     */
+    protected final void merge(String type, MapperService mapperService, String mapping) throws IOException {
+        mapperService.merge(type, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
+    }
+
+    /**
+     * Merge a new mapping into the one in the provided {@link MapperService} with a specific {@code MergeReason}
+     */
+    protected final void merge(MapperService mapperService,
+                               MapperService.MergeReason reason,
+                               XContentBuilder mapping) throws IOException {
+        mapperService.merge("_doc", new CompressedXContent(BytesReference.bytes(mapping)), reason);
+    }
+
+    protected final XContentBuilder topMapping(CheckedConsumer<XContentBuilder, IOException> buildFields) throws IOException {
+        XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc");
+        buildFields.accept(builder);
+        return builder.endObject().endObject();
     }
 
     protected final XContentBuilder mapping(CheckedConsumer<XContentBuilder, IOException> buildFields) throws IOException {
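Taken together, these helpers replace the Strings/XContentFactory boilerplate the converted tests used to assemble by hand: topMapping writes fields directly under the "_doc" root (for root-level keys such as "dynamic" or "_meta"), and the three-argument merge makes the MergeReason explicit. A rough usage sketch, assumed to live inside a MapperServiceTestCase subclass; the method name, field values and _meta content are invented for illustration.

    public void testTopMappingSketch() throws IOException {
        // root-level setting, so it goes through topMapping rather than mapping/fieldMapping
        MapperService mapperService = createMapperService(topMapping(b -> b.field("dynamic", "strict")));

        // merge overload that picks the MergeReason explicitly
        merge(mapperService, MapperService.MergeReason.INDEX_TEMPLATE,
            topMapping(b -> b.startObject("_meta").field("owner", "example").endObject()));

        assertEquals("example", mapperService.documentMapper().meta().get("owner"));
    }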
@@ -167,6 +195,12 @@ public abstract class MapperServiceTestCase extends ESTestCase {
         return builder.endObject().endObject().endObject();
     }
 
+    protected final XContentBuilder dynamicMapping(Mapping dynamicMapping) throws IOException {
+        XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
+        dynamicMapping.toXContent(builder, ToXContent.EMPTY_PARAMS);
+        return builder.endObject();
+    }
+
     protected final XContentBuilder fieldMapping(CheckedConsumer<XContentBuilder, IOException> buildField) throws IOException {
         return mapping(b -> {
             b.startObject("field");
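dynamicMapping(...) is what lets converted tests such as DoubleIndexingDocTests above apply a dynamic mapping update locally instead of going through a preparePutMapping call. A minimal sketch of that pattern, assumed to sit inside a MapperServiceTestCase subclass; the field name is invented, the helpers are the ones in this file.

    public void testDynamicUpdateSketch() throws IOException {
        MapperService mapperService = createMapperService(mapping(b -> {}));

        // parsing a document with an unmapped field produces a dynamic mapping update...
        ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("new_field", "some value")));
        assertNotNull(doc.dynamicMappingsUpdate());

        // ...which dynamicMapping(...) turns back into XContent so it can be merged in directly
        merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate()));
        assertNotNull(mapperService.documentMapper().mappers().getMapper("new_field"));
    }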