Convert some more mapping tests to MapperServiceTestCase (#62089)

We don't need to extend ESSingleNodeTestCase for all these tests.
Alan Woodward authored on 2020-09-08 17:25:01 +01:00, committed by Alan Woodward
parent 5bca671f57
commit 5f05eef7e3
7 changed files with 1362 additions and 1807 deletions
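The diffs below repeat one conversion pattern: build a MapperService with the MapperServiceTestCase helpers instead of spinning up a single-node cluster and calling the client. A minimal sketch of the converted style is shown here for orientation; it is illustrative only, not part of the commit, and the class name ExampleFieldNameTests and the field name someField are made up.

public class ExampleFieldNameTests extends MapperServiceTestCase {

    public void testDynamicFieldIsAdded() throws Exception {
        // Build a MapperService from an empty mapping; no cluster or IndexService is needed.
        MapperService mapperService = createMapperService(mapping(b -> {}));

        // Parsing a document with an unmapped field yields a dynamic mapping update.
        ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("someField", "value")));
        assertNotNull(doc.dynamicMappingsUpdate());

        // Apply the update via merge(), replacing the old preparePutMapping() client call.
        merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate()));
        assertNotNull(mapperService.documentMapper().mappers().getMapper("someField"));
    }
}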

org/elasticsearch/index/mapper/CamelCaseFieldNameTests.java

@@ -19,37 +19,22 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ESSingleNodeTestCase;
public class CamelCaseFieldNameTests extends MapperServiceTestCase {
public class CamelCaseFieldNameTests extends ESSingleNodeTestCase {
public void testCamelCaseFieldNameStaysAsIs() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject());
MapperService mapperService = createMapperService(mapping(b -> {}));
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
DocumentMapper documentMapper = index.mapperService().documentMapper("type");
ParsedDocument doc = documentMapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("thisIsCamelCase", "value1")
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.field("thisIsCamelCase", "value1")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type")
.setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
documentMapper = index.mapperService().documentMapper("type");
merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate()));
DocumentMapper documentMapper = mapperService.documentMapper();
assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));
documentMapper = index.mapperService().documentMapperParser().parse("type", documentMapper.mappingSource());
documentMapper = mapperService.documentMapperParser().parse("_doc", documentMapper.mappingSource());
assertNotNull(documentMapper.mappers().getMapper("thisIsCamelCase"));
assertNull(documentMapper.mappers().getMapper("this_is_camel_case"));

org/elasticsearch/index/mapper/CopyToMapperTests.java

@@ -20,21 +20,15 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -44,35 +38,33 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
public class CopyToMapperTests extends ESSingleNodeTestCase {
public class CopyToMapperTests extends MapperServiceTestCase {
@SuppressWarnings("unchecked")
public void testCopyToFieldsParsing() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("copy_test")
.field("type", "text")
.array("copy_to", "another_field", "cyclic_test")
.endObject()
.startObject("another_field")
.field("type", "text")
.endObject()
.startObject("cyclic_test")
.field("type", "text")
.array("copy_to", "copy_test")
.endObject()
.startObject("int_to_str_test")
.field("type", "integer")
.field("doc_values", false)
.array("copy_to", "another_field", "new_field")
.endObject()
.endObject().endObject().endObject());
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type1").setSource(mapping, XContentType.JSON).get();
DocumentMapper docMapper = index.mapperService().documentMapper("type1");
Mapper fieldMapper = docMapper.mappers().getMapper("copy_test");
MapperService mapperService = createMapperService(mapping(b -> {
b.startObject("copy_test");
{
b.field("type", "text");
b.array("copy_to", "another_field", "cyclic_test");
}
b.endObject();
b.startObject("another_field").field("type", "text").endObject();
b.startObject("cyclic_test");
{
b.field("type", "text");
b.array("copy_to", "copy_test");
}
b.endObject();
b.startObject("int_to_str_test");
{
b.field("type", "integer");
b.field("doc_values", false);
b.array("copy_to", "another_field", "new_field");
}
b.endObject();
}));
Mapper fieldMapper = mapperService.documentMapper().mappers().getMapper("copy_test");
// Check json serialization
TextFieldMapper stringFieldMapper = (TextFieldMapper) fieldMapper;
@@ -91,13 +83,11 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertThat(copyToList.get(1), equalTo("cyclic_test"));
// Check data parsing
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
.field("cyclic_test", "bar")
.field("int_to_str_test", 42)
.endObject());
ParsedDocument parsedDoc = docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON));
ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(b -> {
b.field("copy_test", "foo");
b.field("cyclic_test", "bar");
b.field("int_to_str_test", 42);
}));
ParseContext.Document doc = parsedDoc.rootDoc();
assertThat(doc.getFields("copy_test").length, equalTo(2));
assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -118,43 +108,41 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertThat(doc.getFields("new_field")[0].numericValue().intValue(), equalTo(42));
assertNotNull(parsedDoc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type1")
.setSource(parsedDoc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
merge(mapperService, dynamicMapping(parsedDoc.dynamicMappingsUpdate()));
docMapper = index.mapperService().documentMapper("type1");
fieldMapper = docMapper.mappers().getMapper("new_field");
fieldMapper = mapperService.documentMapper().mappers().getMapper("new_field");
assertThat(fieldMapper.typeName(), equalTo("long"));
}
public void testCopyToFieldsInnerObjectParsing() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
b.startObject("copy_test");
{
b.field("type", "text");
b.field("copy_to", "very.inner.field");
}
b.endObject();
b.startObject("very");
{
b.field("type", "object");
b.startObject("properties");
{
b.startObject("inner").field("type", "object").endObject();
}
b.endObject();
}
b.endObject();
}));
.startObject("copy_test")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
ParseContext.Document doc = docMapper.parse(source(b -> {
b.field("copy_test", "foo");
b.startObject("foo");
{
b.startObject("bar").field("baz", "zoo").endObject();
}
b.endObject();
})).rootDoc();
.startObject("very")
.field("type", "object")
.startObject("properties")
.startObject("inner")
.field("type", "object")
.endObject()
.endObject()
.endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
.startObject("foo").startObject("bar").field("baz", "zoo").endObject().endObject()
.endObject());
ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json,
XContentType.JSON)).rootDoc();
assertThat(doc.getFields("copy_test").length, equalTo(1));
assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -164,25 +152,20 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
}
public void testCopyToDynamicInnerObjectParsing() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
b.startObject("copy_test");
{
b.field("type", "text");
b.field("copy_to", "very.inner.field");
}
b.endObject();
}));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParseContext.Document doc = docMapper.parse(source(b -> {
b.field("copy_test", "foo");
b.field("new_field", "bar");
})).rootDoc();
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
.field("new_field", "bar")
.endObject());
ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json,
XContentType.JSON)).rootDoc();
assertThat(doc.getFields("copy_test").length, equalTo(1));
assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -194,33 +177,30 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
}
public void testCopyToDynamicInnerInnerObjectParsing() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "text")
.field("copy_to", "very.far.inner.field")
.endObject()
.startObject("very")
.field("type", "object")
.startObject("properties")
.startObject("far")
.field("type", "object")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
b.startObject("copy_test");
{
b.field("type", "text");
b.field("copy_to", "very.far.inner.field");
}
b.endObject();
b.startObject("very");
{
b.field("type", "object");
b.startObject("properties");
{
b.startObject("far").field("type", "object").endObject();
}
b.endObject();
}
b.endObject();
}));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
ParseContext.Document doc = docMapper.parse(source(b -> {
b.field("copy_test", "foo");
b.field("new_field", "bar");
})).rootDoc();
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
.field("new_field", "bar")
.endObject());
ParseContext.Document doc = docMapper.parse(new SourceToParse("test", "type1", "1", json,
XContentType.JSON)).rootDoc();
assertThat(doc.getFields("copy_test").length, equalTo(1));
assertThat(doc.getFields("copy_test")[0].stringValue(), equalTo("foo"));
@@ -232,95 +212,84 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
}
public void testCopyToStrictDynamicInnerObjectParsing() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
.field("dynamic", "strict")
.startObject("properties")
.startObject("copy_test")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createDocumentMapper(topMapping(b -> {
b.field("dynamic", "strict");
b.startObject("properties");
{
b.startObject("copy_test");
{
b.field("type", "text");
b.field("copy_to", "very.inner.field");
}
b.endObject();
}
b.endObject();
}));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> docMapper.parse(source(b -> b.field("copy_test", "foo"))));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
.endObject());
try {
docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc();
fail();
} catch (MapperParsingException ex) {
assertThat(ex.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [type1] is not allowed"));
}
assertThat(e.getMessage(), startsWith("mapping set to strict, dynamic introduction of [very] within [_doc] is not allowed"));
}
public void testCopyToInnerStrictDynamicInnerObjectParsing() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
.startObject("properties")
.startObject("copy_test")
.field("type", "text")
.field("copy_to", "very.far.field")
.endObject()
.startObject("very")
.field("type", "object")
.startObject("properties")
.startObject("far")
.field("type", "object")
.field("dynamic", "strict")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
b.startObject("copy_test");
{
b.field("type", "text");
b.field("copy_to", "very.far.field");
}
b.endObject();
b.startObject("very");
{
b.field("type", "object");
b.startObject("properties");
{
b.startObject("far");
{
b.field("type", "object");
b.field("dynamic", "strict");
}
b.endObject();
}
b.endObject();
}
b.endObject();
}));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> docMapper.parse(source(b -> b.field("copy_test", "foo"))));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
.endObject());
try {
docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc();
fail();
} catch (MapperParsingException ex) {
assertThat(ex.getMessage(),
startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
}
assertThat(e.getMessage(),
startsWith("mapping set to strict, dynamic introduction of [field] within [very.far] is not allowed"));
}
public void testCopyToFieldMerge() throws Exception {
String mappingBefore = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("copy_test")
.field("type", "text")
.array("copy_to", "foo", "bar")
.endObject()
.endObject().endObject().endObject());
String mappingAfter = Strings.toString(jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("copy_test")
.field("type", "text")
.array("copy_to", "baz", "bar")
.endObject()
.endObject().endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper docMapperBefore = mapperService.merge("type1", new CompressedXContent(mappingBefore),
MapperService.MergeReason.MAPPING_UPDATE);
MapperService mapperService = createMapperService(mapping(b -> {
b.startObject("copy_test");
{
b.field("type", "text");
b.array("copy_to", "foo", "bar");
}
b.endObject();
}));
DocumentMapper docMapperBefore = mapperService.documentMapper();
FieldMapper fieldMapperBefore = (FieldMapper) docMapperBefore.mappers().getMapper("copy_test");
assertEquals(Arrays.asList("foo", "bar"), fieldMapperBefore.copyTo().copyToFields());
DocumentMapper docMapperAfter = mapperService.merge("type1", new CompressedXContent(mappingAfter),
MapperService.MergeReason.MAPPING_UPDATE);
merge(mapperService, mapping(b -> {
b.startObject("copy_test");
{
b.field("type", "text");
b.array("copy_to", "baz", "bar");
}
b.endObject();
}));
DocumentMapper docMapperAfter = mapperService.documentMapper();
FieldMapper fieldMapperAfter = (FieldMapper) docMapperAfter.mappers().getMapper("copy_test");
assertEquals(Arrays.asList("baz", "bar"), fieldMapperAfter.copyTo().copyToFields());
@@ -328,73 +297,84 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
}
public void testCopyToNestedField() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
XContentBuilder mapping = jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("target")
.field("type", "long")
.field("doc_values", false)
.endObject()
.startObject("n1")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.field("doc_values", false)
.endObject()
.startObject("n2")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.field("doc_values", false)
.endObject()
.startObject("source")
.field("type", "long")
.field("doc_values", false)
.startArray("copy_to")
.value("target") // should go to the root doc
.value("n1.target") // should go to the parent doc
.value("n1.n2.target") // should go to the current doc
.endArray()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("target");
{
b.field("type", "long");
b.field("doc_values", false);
}
b.endObject();
b.startObject("n1");
{
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("target");
{
b.field("type", "long");
b.field("doc_values", false);
}
b.endObject();
b.startObject("n2");
{
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("target");
{
b.field("type", "long");
b.field("doc_values", false);
}
b.endObject();
b.startObject("source");
{
b.field("type", "long");
b.field("doc_values", false);
b.startArray("copy_to");
{
b.value("target"); // should go to the root doc
b.value("n1.target"); // should go to the parent doc
b.value("n1.n2.target"); // should go to the current doc
}
b.endArray();
}
b.endObject();
}
b.endObject();
}
b.endObject();
}
b.endObject();
}
b.endObject();
}));
DocumentMapper mapper = parser.parse("type", new CompressedXContent(Strings.toString(mapping)));
ParsedDocument doc = mapper.parse(source(b -> {
b.startArray("n1");
{
b.startObject();
{
b.startArray("n2");
{
b.startObject().field("source", 3).endObject();
b.startObject().field("source", 5).endObject();
}
b.endArray();
}
b.endObject();
b.startObject();
{
b.startArray("n2");
{
b.startObject().field("source", 7).endObject();
}
b.endArray();
}
b.endObject();
}
b.endArray();
}));
XContentBuilder jsonDoc = XContentFactory.jsonBuilder()
.startObject()
.startArray("n1")
.startObject()
.startArray("n2")
.startObject()
.field("source", 3)
.endObject()
.startObject()
.field("source", 5)
.endObject()
.endArray()
.endObject()
.startObject()
.startArray("n2")
.startObject()
.field("source", 7)
.endObject()
.endArray()
.endObject()
.endArray()
.endObject();
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1",
BytesReference.bytes(jsonDoc), XContentType.JSON));
assertEquals(6, doc.docs().size());
Document nested = doc.docs().get(0);
@@ -428,145 +408,139 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertFieldValue(root, "n1.n2.target");
}
public void testCopyToChildNested() throws Exception {
IndexService indexService = createIndex("test");
XContentBuilder rootToNestedMapping = jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject("source")
.field("type", "long")
.field("copy_to", "n1.target")
.endObject()
.startObject("n1")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)),
MergeReason.MAPPING_UPDATE));
public void testCopyToChildNested() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
b.startObject("source");
{
b.field("type", "long");
b.field("copy_to", "n1.target");
}
b.endObject();
b.startObject("n1");
{
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("target").field("type", "long").endObject();
}
b.endObject();
}
b.endObject();
})));
assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings"));
XContentBuilder nestedToNestedMapping = jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject("n1")
.field("type", "nested")
.startObject("properties")
.startObject("source")
.field("type", "long")
.field("copy_to", "n1.n2.target")
.endObject()
.startObject("n2")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(nestedToNestedMapping)),
MergeReason.MAPPING_UPDATE));
expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
b.startObject("n1");
{
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("source");
{
b.field("type", "long");
b.field("copy_to", "n1.n2.target");
}
b.endObject();
b.startObject("n2");
{
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("target").field("type", "long").endObject();
}
b.endObject();
}
b.endObject();
}
b.endObject();
}
b.endObject();
})));
}
public void testCopyToSiblingNested() throws Exception {
IndexService indexService = createIndex("test");
XContentBuilder rootToNestedMapping = jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject("n1")
.field("type", "nested")
.startObject("properties")
.startObject("source")
.field("type", "long")
.field("copy_to", "n2.target")
.endObject()
.endObject()
.endObject()
.startObject("n2")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)),
MergeReason.MAPPING_UPDATE));
public void testCopyToSiblingNested() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
b.startObject("n1");
{
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("source");
{
b.field("type", "long");
b.field("copy_to", "n2.target");
}
b.endObject();
}
b.endObject();
}
b.endObject();
b.startObject("n2");
{
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("target").field("type", "long").endObject();
}
b.endObject();
}
b.endObject();
})));
assertThat(e.getMessage(), Matchers.startsWith("Illegal combination of [copy_to] and [nested] mappings"));
}
public void testCopyToObject() throws Exception {
IndexService indexService = createIndex("test");
XContentBuilder rootToNestedMapping = jsonBuilder().startObject()
.startObject("_doc")
.startObject("properties")
.startObject("source")
.field("type", "long")
.field("copy_to", "target")
.endObject()
.startObject("target")
.field("type", "object")
.endObject()
.endObject()
.endObject()
.endObject();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("_doc", new CompressedXContent(BytesReference.bytes(rootToNestedMapping)),
MergeReason.MAPPING_UPDATE));
public void testCopyToObject() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
b.startObject("source");
{
b.field("type", "long");
b.field("copy_to", "target");
}
b.endObject();
b.startObject("target");
{
b.field("type", "object");
}
b.endObject();
})));
assertThat(e.getMessage(), Matchers.startsWith("Cannot copy to field [target] since it is mapped as an object"));
}
public void testCopyToDynamicNestedObjectParsing() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
.startArray("dynamic_templates")
.startObject()
.startObject("objects")
.field("match_mapping_type", "object")
.startObject("mapping")
.field("type", "nested")
.endObject()
.endObject()
.endObject()
.endArray()
.startObject("properties")
.startObject("copy_test")
.field("type", "text")
.field("copy_to", "very.inner.field")
.endObject()
.endObject()
.endObject().endObject());
DocumentMapper docMapper = createDocumentMapper(topMapping(b -> {
b.startArray("dynamic_templates");
{
b.startObject();
{
b.startObject("objects");
{
b.field("match_mapping_type", "object");
b.startObject("mapping").field("type", "nested").endObject();
}
b.endObject();
}
b.endObject();
}
b.endArray();
b.startObject("properties");
{
b.startObject("copy_test");
{
b.field("type", "text");
b.field("copy_to", "very.inner.field");
}
b.endObject();
}
b.endObject();
}));
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser()
.parse("type1", new CompressedXContent(mapping));
MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source(b -> {
b.field("copy_test", "foo");
b.field("new_field", "bar");
})));
BytesReference json = BytesReference.bytes(jsonBuilder().startObject()
.field("copy_test", "foo")
.field("new_field", "bar")
.endObject());
try {
docMapper.parse(new SourceToParse("test", "type1", "1", json, XContentType.JSON)).rootDoc();
fail();
} catch (MapperParsingException ex) {
assertThat(ex.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`"));
}
assertThat(e.getMessage(), startsWith("It is forbidden to create dynamic nested objects ([very]) through `copy_to`"));
}
private void assertFieldValue(Document doc, String field, Number... expected) {
@@ -581,95 +555,78 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertArrayEquals(expected, actual);
}
public void testCopyToMultiField() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
.startObject("properties")
.startObject("my_field")
.field("type", "keyword")
.field("copy_to", "my_field.bar")
.startObject("fields")
.startObject("bar")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE));
public void testCopyToMultiField() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
b.startObject("my_field");
{
b.field("type", "keyword");
b.field("copy_to", "my_field.bar");
b.startObject("fields");
{
b.startObject("bar").field("type", "text").endObject();
}
b.endObject();
}
b.endObject();
})));
assertEquals("[copy_to] may not be used to copy to a multi-field: [my_field.bar]", e.getMessage());
}
public void testNestedCopyTo() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
.startObject("properties")
.startObject("n")
.field("type", "nested")
.startObject("properties")
.startObject("foo")
.field("type", "keyword")
.field("copy_to", "n.bar")
.endObject()
.startObject("bar")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); // no exception
public void testNestedCopyTo() throws IOException {
createDocumentMapper(fieldMapping(b -> {
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("foo");
{
b.field("type", "keyword");
b.field("copy_to", "n.bar");
}
b.endObject();
b.startObject("bar").field("type", "text").endObject();
}
b.endObject();
}));
}
public void testNestedCopyToMultiField() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
.startObject("properties")
.startObject("n")
.field("type", "nested")
.startObject("properties")
.startObject("my_field")
.field("type", "keyword")
.field("copy_to", "n.my_field.bar")
.startObject("fields")
.startObject("bar")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE));
assertEquals("[copy_to] may not be used to copy to a multi-field: [n.my_field.bar]", e.getMessage());
public void testNestedCopyToMultiField() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> createMapperService(fieldMapping(b -> {
b.field("type", "nested");
b.startObject("properties");
{
b.startObject("my_field");
{
b.field("type", "keyword");
b.field("copy_to", "field.my_field.bar");
b.startObject("fields");
{
b.startObject("bar").field("type", "text").endObject();
}
b.endObject();
}
b.endObject();
}
b.endObject();
})));
assertEquals("[copy_to] may not be used to copy to a multi-field: [field.my_field.bar]", e.getMessage());
}
public void testCopyFromMultiField() throws Exception {
String mapping = Strings.toString(jsonBuilder().startObject().startObject("_doc")
.startObject("properties")
.startObject("my_field")
.field("type", "keyword")
.startObject("fields")
.startObject("bar")
.field("type", "text")
.field("copy_to", "my_field.baz")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject());
MapperService mapperService = createIndex("test").mapperService();
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapperService.merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE));
public void testCopyFromMultiField() {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
b.field("type", "keyword");
b.startObject("fields");
{
b.startObject("bar");
{
b.field("type", "text");
b.field("copy_to", "my_field.baz");
}
b.endObject();
}
b.endObject();
})));
assertThat(e.getMessage(),
Matchers.containsString("copy_to in multi fields is not allowed. Found the copy_to in field [bar] " +
"which is within a multi field."));
Matchers.containsString("copy_to in multi fields is not allowed. Found the copy_to in field [bar] " +
"which is within a multi field."));
}
}

org/elasticsearch/index/mapper/DocumentMapperParserTests.java

@@ -19,84 +19,63 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.hamcrest.Matchers.equalTo;
public class DocumentMapperParserTests extends ESSingleNodeTestCase {
public void testTypeLevel() throws Exception {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject());
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertThat(mapper.type(), equalTo("type"));
}
public class DocumentMapperParserTests extends MapperServiceTestCase {
public void testFieldNameWithDots() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo.bar").field("type", "text").endObject()
.startObject("foo.baz").field("type", "keyword").endObject()
.endObject().endObject().endObject());
DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
b.startObject("foo.bar").field("type", "text").endObject();
b.startObject("foo.baz").field("type", "keyword").endObject();
}));
assertNotNull(docMapper.mappers().getMapper("foo.bar"));
assertNotNull(docMapper.mappers().getMapper("foo.baz"));
assertNotNull(docMapper.objectMappers().get("foo"));
}
public void testFieldNameWithDeepDots() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo.bar").field("type", "text").endObject()
.startObject("foo.baz").startObject("properties")
.startObject("deep.field").field("type", "keyword").endObject().endObject()
.endObject().endObject().endObject().endObject());
DocumentMapper docMapper = mapperParser.parse("type", new CompressedXContent(mapping));
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
b.startObject("foo.bar").field("type", "text").endObject();
b.startObject("foo.baz");
{
b.startObject("properties");
{
b.startObject("deep.field").field("type", "keyword").endObject();
}
b.endObject();
}
b.endObject();
}));
assertNotNull(docMapper.mappers().getMapper("foo.bar"));
assertNotNull(docMapper.mappers().getMapper("foo.baz.deep.field"));
assertNotNull(docMapper.objectMappers().get("foo"));
}
public void testFieldNameWithDotsConflict() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").field("type", "text").endObject()
.startObject("foo.baz").field("type", "keyword").endObject()
.endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
mapperParser.parse("type", new CompressedXContent(mapping)));
public void testFieldNameWithDotsConflict() {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(mapping(b -> {
b.startObject("foo").field("type", "text").endObject();
b.startObject("foo.baz").field("type", "keyword").endObject();
})));
assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [text] to [ObjectMapper]"));
}
public void testMultiFieldsWithFieldAlias() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser mapperParser = indexService.mapperService().documentMapperParser();
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties")
.startObject("field")
.field("type", "text")
.startObject("fields")
.startObject("alias")
.field("type", "alias")
.field("path", "other-field")
.endObject()
.endObject()
.endObject()
.startObject("other-field")
.field("type", "keyword")
.endObject()
.endObject()
.endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapperParser.parse("type", new CompressedXContent(mapping)));
assertEquals("Type [alias] cannot be used in multi field", e.getMessage());
public void testMultiFieldsWithFieldAlias() {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(mapping(b -> {
b.startObject("field");
{
b.field("type", "text");
b.startObject("fields");
{
b.startObject("alias");
{
b.field("type", "alias");
b.field("path", "other-field");
}
b.endObject();
}
b.endObject();
}
b.endObject();
b.startObject("other-field").field("type", "keyword").endObject();
})));
assertEquals("Failed to parse mapping [_doc]: Type [alias] cannot be used in multi field", e.getMessage());
}
}

org/elasticsearch/index/mapper/DocumentMapperTests.java

@@ -19,18 +19,19 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -41,24 +42,29 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class DocumentMapperTests extends ESSingleNodeTestCase {
public class DocumentMapperTests extends MapperServiceTestCase {
public void testAddFields() throws Exception {
String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
.startObject("name").field("type", "text").endObject()
.endObject().endObject().endObject());
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse("person", new CompressedXContent(stage1Mapping));
String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
.startObject("name").field("type", "text").endObject()
.startObject("age").field("type", "integer").endObject()
.startObject("obj1").startObject("properties").startObject("prop1").field("type", "integer").endObject().endObject()
.endObject()
.endObject().endObject().endObject());
DocumentMapper stage2 = parser.parse("person", new CompressedXContent(stage2Mapping));
DocumentMapper stage1
= createDocumentMapper(mapping(b -> b.startObject("name").field("type", "text").endObject()));
DocumentMapper stage2 = createDocumentMapper(mapping(b -> {
b.startObject("name").field("type", "text").endObject();
b.startObject("age").field("type", "integer").endObject();
b.startObject("obj1");
{
b.startObject("properties");
{
b.startObject("prop1").field("type", "integer").endObject();
}
b.endObject();
}
b.endObject();
}));
MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
DocumentMapper merged = stage1.merge(stage2.mapping(), reason);
// stage1 mapping should not have been modified
assertThat(stage1.mappers().getMapper("age"), nullValue());
assertThat(stage1.mappers().getMapper("obj1.prop1"), nullValue());
@@ -68,14 +74,10 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
}
public void testMergeObjectDynamic() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").endObject().endObject());
DocumentMapper mapper = parser.parse("type1", new CompressedXContent(objectMapping));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
assertNull(mapper.root().dynamic());
String withDynamicMapping = Strings.toString(
XContentFactory.jsonBuilder().startObject().startObject("type1").field("dynamic", "false").endObject().endObject());
DocumentMapper withDynamicMapper = parser.parse("type1", new CompressedXContent(withDynamicMapping));
DocumentMapper withDynamicMapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "false")));
assertThat(withDynamicMapper.root().dynamic(), equalTo(ObjectMapper.Dynamic.FALSE));
DocumentMapper merged = mapper.merge(withDynamicMapper.mapping(), MergeReason.MAPPING_UPDATE);
@@ -83,138 +85,115 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
}
public void testMergeObjectAndNested() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String objectMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("obj").field("type", "object").endObject()
.endObject().endObject().endObject());
DocumentMapper objectMapper = parser.parse("type1", new CompressedXContent(objectMapping));
String nestedMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("obj").field("type", "nested").endObject()
.endObject().endObject().endObject());
DocumentMapper nestedMapper = parser.parse("type1", new CompressedXContent(nestedMapping));
DocumentMapper objectMapper
= createDocumentMapper(mapping(b -> b.startObject("obj").field("type", "object").endObject()));
DocumentMapper nestedMapper
= createDocumentMapper(mapping(b -> b.startObject("obj").field("type", "nested").endObject()));
MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
try {
objectMapper.merge(nestedMapper.mapping(), reason);
fail();
} catch (IllegalArgumentException e) {
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> objectMapper.merge(nestedMapper.mapping(), reason));
assertThat(e.getMessage(), containsString("cannot change object mapping from non-nested to nested"));
}
try {
nestedMapper.merge(objectMapper.mapping(), reason);
fail();
} catch (IllegalArgumentException e) {
{
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> nestedMapper.merge(objectMapper.mapping(), reason));
assertThat(e.getMessage(), containsString("cannot change object mapping from nested to non-nested"));
}
}
@Override
protected IndexAnalyzers createIndexAnalyzers(IndexSettings indexSettings) {
Map<String, NamedAnalyzer> analyzers = new HashMap<>();
analyzers.put("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, new StandardAnalyzer()));
analyzers.put("keyword", new NamedAnalyzer("keyword", AnalyzerScope.INDEX, new KeywordAnalyzer()));
analyzers.put("whitespace", new NamedAnalyzer("whitespace", AnalyzerScope.INDEX, new WhitespaceAnalyzer()));
return new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap());
}
public void testMergeSearchAnalyzer() throws Exception {
XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.field("analyzer", "standard")
.field("search_analyzer", "whitespace")
.endObject().endObject()
.endObject().endObject();
MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService();
MapperService mapperService = createMapperService(fieldMapping(b -> {
b.field("type", "text");
b.field("analyzer", "default");
b.field("search_analyzer", "whitespace");
}));
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("whitespace"));
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
equalTo("whitespace"));
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.field("analyzer", "standard")
.field("search_analyzer", "keyword")
.endObject().endObject()
.endObject().endObject());
mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("keyword"));
merge(mapperService, fieldMapping(b -> {
b.field("type", "text");
b.field("analyzer", "default");
b.field("search_analyzer", "keyword");
}));
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
equalTo("keyword"));
}
public void testChangeSearchAnalyzerToDefault() throws Exception {
XContentBuilder mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.field("analyzer", "standard")
.field("search_analyzer", "whitespace")
.endObject().endObject()
.endObject().endObject();
MapperService mapperService = createIndex("test", Settings.EMPTY, "type", mapping1).mapperService();
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("whitespace"));
MapperService mapperService = createMapperService(fieldMapping(b -> {
b.field("type", "text");
b.field("analyzer", "default");
b.field("search_analyzer", "whitespace");
}));
String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field")
.field("type", "text")
.field("analyzer", "standard")
.endObject().endObject()
.endObject().endObject());
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
equalTo("whitespace"));
mapperService.merge("type", new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE);
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(), equalTo("standard"));
merge(mapperService, fieldMapping(b -> {
b.field("type", "text");
b.field("analyzer", "default");
}));
assertThat(mapperService.fieldType("field").getTextSearchInfo().getSearchAnalyzer().name(),
equalTo("default"));
}
public void testConcurrentMergeTest() throws Throwable {
final MapperService mapperService = createIndex("test").mapperService();
MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
mapperService.merge("test", new CompressedXContent("{\"test\":{}}"), reason);
final DocumentMapper documentMapper = mapperService.documentMapper("test");
MappingLookup dfm = documentMapper.mappers();
try {
assertNotNull(dfm.indexAnalyzer().tokenStream("non_existing_field", "foo"));
fail();
} catch (IllegalArgumentException e) {
// ok that's expected
}
final MapperService mapperService = createMapperService(mapping(b -> {}));
final DocumentMapper documentMapper = mapperService.documentMapper();
expectThrows(IllegalArgumentException.class,
() -> documentMapper.mappers().indexAnalyzer().tokenStream("non_existing_field", "foo"));
final AtomicBoolean stopped = new AtomicBoolean(false);
final CyclicBarrier barrier = new CyclicBarrier(2);
final AtomicReference<String> lastIntroducedFieldName = new AtomicReference<>();
final AtomicReference<Exception> error = new AtomicReference<>();
final Thread updater = new Thread() {
@Override
public void run() {
try {
barrier.await();
for (int i = 0; i < 200 && stopped.get() == false; i++) {
final String fieldName = Integer.toString(i);
ParsedDocument doc = documentMapper.parse(new SourceToParse("test",
"test",
fieldName,
new BytesArray("{ \"" + fieldName + "\" : \"test\" }"),
XContentType.JSON));
Mapping update = doc.dynamicMappingsUpdate();
assert update != null;
lastIntroducedFieldName.set(fieldName);
mapperService.merge("test", new CompressedXContent(update.toString()), MergeReason.MAPPING_UPDATE);
}
} catch (Exception e) {
error.set(e);
} finally {
stopped.set(true);
final Thread updater = new Thread(() -> {
try {
barrier.await();
for (int i = 0; i < 200 && stopped.get() == false; i++) {
final String fieldName = Integer.toString(i);
ParsedDocument doc = documentMapper.parse(source(b -> b.field(fieldName, "test")));
Mapping update = doc.dynamicMappingsUpdate();
assert update != null;
lastIntroducedFieldName.set(fieldName);
mapperService.merge("_doc", new CompressedXContent(update.toString()), MergeReason.MAPPING_UPDATE);
}
} catch (Exception e) {
error.set(e);
} finally {
stopped.set(true);
}
};
});
updater.start();
try {
barrier.await();
while(stopped.get() == false) {
final String fieldName = lastIntroducedFieldName.get();
final BytesReference source = new BytesArray("{ \"" + fieldName + "\" : \"test\" }");
ParsedDocument parsedDoc = documentMapper.parse(new SourceToParse("test",
"test",
"random",
source,
XContentType.JSON));
if (fieldName == null) {
continue;
}
ParsedDocument parsedDoc = mapperService.documentMapper().parse(source(b -> b.field(fieldName, "test")));
if (parsedDoc.dynamicMappingsUpdate() != null) {
// not in the mapping yet, try again
continue;
}
dfm = documentMapper.mappers();
assertNotNull(dfm.indexAnalyzer().tokenStream(fieldName, "foo"));
assertNotNull(mapperService.indexAnalyzer().tokenStream(fieldName, "foo"));
}
} finally {
stopped.set(true);
@@ -226,133 +205,82 @@ public class DocumentMapperTests extends ESSingleNodeTestCase {
}
public void testDoNotRepeatOriginalMapping() throws IOException {
MergeReason reason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("_source")
.field("enabled", false)
.endObject()
.endObject().endObject()));
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("type", mapping, reason);
MapperService mapperService
= createMapperService(topMapping(b -> b.startObject("_source").field("enabled", false).endObject()));
CompressedXContent update = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("foo")
.field("type", "text")
.endObject()
.endObject()
.endObject().endObject()));
DocumentMapper mapper = mapperService.merge("type", update, reason);
merge(mapperService, fieldMapping(b -> b.field("type", "text")));
assertNotNull(mapper.mappers().getMapper("foo"));
assertFalse(mapper.sourceMapper().enabled());
assertNotNull(mapperService.documentMapper().mappers().getMapper("field"));
assertFalse(mapperService.documentMapper().sourceMapper().enabled());
}
public void testMergeMetadataFieldsForIndexTemplates() throws IOException {
CompressedXContent mapping = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("_source")
.field("enabled", false)
.endObject()
.endObject().endObject()));
MapperService mapperService = createIndex("test").mapperService();
mapperService.merge("type", mapping, MergeReason.INDEX_TEMPLATE);
MapperService mapperService
= createMapperService(topMapping(b -> b.startObject("_source").field("enabled", false).endObject()));
CompressedXContent update = new CompressedXContent(BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("type")
.startObject("_source")
.field("enabled", true)
.endObject()
.endObject().endObject()));
DocumentMapper mapper = mapperService.merge("type", update, MergeReason.INDEX_TEMPLATE);
merge(mapperService, MergeReason.INDEX_TEMPLATE,
topMapping(b -> b.startObject("_source").field("enabled", true).endObject()));
DocumentMapper mapper = mapperService.documentMapper();
assertTrue(mapper.sourceMapper().enabled());
}
public void testMergeMeta() throws IOException {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String initMapping = Strings
.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("test")
.startObject("_meta")
.field("foo").value("bar")
.endObject()
.endObject()
.endObject());
DocumentMapper initMapper = parser.parse("test", new CompressedXContent(initMapping));
DocumentMapper initMapper
= createDocumentMapper(topMapping(b -> b.startObject("_meta").field("foo", "bar").endObject()));
assertThat(initMapper.meta().get("foo"), equalTo("bar"));
String updateMapping = Strings
.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("test")
.startObject("properties")
.startObject("name").field("type", "text").endObject()
.endObject()
.endObject()
.endObject());
DocumentMapper updatedMapper = parser.parse("test", new CompressedXContent(updateMapping));
DocumentMapper updatedMapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text")));
DocumentMapper mergedMapper = initMapper.merge(updatedMapper.mapping(), MergeReason.MAPPING_UPDATE);
assertThat(mergedMapper.meta().get("foo"), equalTo("bar"));
updateMapping = Strings
.toString(XContentFactory.jsonBuilder()
.startObject()
.startObject("test")
.startObject("_meta")
.field("foo").value("new_bar")
.endObject()
.endObject()
.endObject());
updatedMapper = parser.parse("test", new CompressedXContent(updateMapping));
updatedMapper
= createDocumentMapper(topMapping(b -> b.startObject("_meta").field("foo", "new_bar").endObject()));
mergedMapper = initMapper.merge(updatedMapper.mapping(), MergeReason.MAPPING_UPDATE);
assertThat(mergedMapper.meta().get("foo"), equalTo("new_bar"));
}
public void testMergeMetaForIndexTemplate() throws IOException {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String initMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("_meta")
.field("field", "value")
.startObject("object")
.field("field1", "value1")
.field("field2", "value2")
.endObject()
.endObject()
.endObject());
DocumentMapper initMapper = parser.parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(initMapping));
DocumentMapper initMapper = createDocumentMapper(topMapping(b -> {
b.startObject("_meta");
{
b.field("field", "value");
b.startObject("object");
{
b.field("field1", "value1");
b.field("field2", "value2");
}
b.endObject();
}
b.endObject();
}));
Map<String, Object> expected = org.elasticsearch.common.collect.Map.of(
"field", "value",
"object", org.elasticsearch.common.collect.Map.of("field1", "value1", "field2", "value2"));
assertThat(initMapper.meta(), equalTo(expected));
String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("name").field("type", "text").endObject()
.endObject()
.endObject());
DocumentMapper updatedMapper = parser.parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(updateMapping));
DocumentMapper updatedMapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text")));
DocumentMapper mergedMapper = initMapper.merge(updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE);
assertThat(mergedMapper.meta(), equalTo(expected));
updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("_meta")
.field("field", "value")
.startObject("object")
.field("field2", "new_value")
.field("field3", "value3")
.endObject()
.endObject()
.endObject());
updatedMapper = parser.parse(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(updateMapping));
updatedMapper = createDocumentMapper(topMapping(b -> {
b.startObject("_meta");
{
b.field("field", "value");
b.startObject("object");
{
b.field("field2", "new_value");
b.field("field3", "value3");
}
b.endObject();
}
b.endObject();
}));
mergedMapper = mergedMapper.merge(updatedMapper.mapping(), MergeReason.INDEX_TEMPLATE);
expected = org.elasticsearch.common.collect.Map.of(

org/elasticsearch/index/mapper/DoubleIndexingDocTests.java

@@ -18,80 +18,55 @@
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
public class DoubleIndexingDocTests extends ESSingleNodeTestCase {
public class DoubleIndexingDocTests extends MapperServiceTestCase {
public void testDoubleIndexingSameDoc() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), Lucene.STANDARD_ANALYZER));
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject()
.endObject().endObject());
IndexService index = createIndex("test");
client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping, XContentType.JSON).get();
MapperService mapperService = index.mapperService();
DocumentMapper mapper = mapperService.documentMapper();
MapperService mapperService = createMapperService(mapping(b -> {}));
QueryShardContext context = index.newQueryShardContext(0, null, () -> 0L, null);
ParsedDocument doc = mapper.parse(new SourceToParse("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field1", "value1")
.field("field2", 1)
.field("field3", 1.1)
.field("field4", "2010-01-01")
.startArray("field5").value(1).value(2).value(3).endArray()
.endObject()),
XContentType.JSON));
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> {
b.field("field1", "value1");
b.field("field2", 1);
b.field("field3", 1.1);
b.field("field4", "2010-01-01");
b.startArray("field5").value(1).value(2).value(3).endArray();
}));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("type")
.setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get();
merge(mapperService, dynamicMapping(doc.dynamicMappingsUpdate()));
writer.addDocument(doc.rootDoc());
writer.addDocument(doc.rootDoc());
QueryShardContext qsc = mock(QueryShardContext.class);
IndexReader reader = DirectoryReader.open(writer);
IndexSearcher searcher = new IndexSearcher(reader);
withLuceneIndex(mapperService, iw -> {
iw.addDocument(doc.rootDoc());
iw.addDocument(doc.rootDoc());
}, reader -> {
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", qsc), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", qsc), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", qsc), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", qsc), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", qsc), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", qsc), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
writer.close();
reader.close();
dir.close();
topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", qsc), 10);
assertThat(topDocs.totalHits.value, equalTo(2L));
});
}
}

org/elasticsearch/index/mapper/MapperServiceTestCase.java

@@ -94,6 +94,12 @@ public abstract class MapperServiceTestCase extends ESTestCase {
return createMapperService(mappings).documentMapper();
}
protected final DocumentMapper createDocumentMapper(String type, String mappings) throws IOException {
MapperService mapperService = createMapperService(mapping(b -> {}));
merge(type, mapperService, mappings);
return mapperService.documentMapper();
}
protected final MapperService createMapperService(XContentBuilder mappings) throws IOException {
return createMapperService(getIndexSettings(), mappings);
}
@@ -158,7 +164,29 @@ public abstract class MapperServiceTestCase extends ESTestCase {
* Merge a new mapping into the one in the provided {@link MapperService}.
*/
protected final void merge(MapperService mapperService, XContentBuilder mapping) throws IOException {
mapperService.merge("_doc", new CompressedXContent(BytesReference.bytes(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
merge(mapperService, MapperService.MergeReason.MAPPING_UPDATE, mapping);
}
/**
* Merge a new mapping into the one in the provided {@link MapperService}.
*/
protected final void merge(String type, MapperService mapperService, String mapping) throws IOException {
mapperService.merge(type, new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
}
/**
* Merge a new mapping into the one in the provided {@link MapperService} with a specific {@code MergeReason}
*/
protected final void merge(MapperService mapperService,
MapperService.MergeReason reason,
XContentBuilder mapping) throws IOException {
mapperService.merge("_doc", new CompressedXContent(BytesReference.bytes(mapping)), reason);
}
protected final XContentBuilder topMapping(CheckedConsumer<XContentBuilder, IOException> buildFields) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startObject("_doc");
buildFields.accept(builder);
return builder.endObject().endObject();
}
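An editorial usage sketch follows (not part of the commit): it shows how the new merge overload with an explicit MergeReason combines with topMapping above; the method name and _meta content are illustrative.

    // Editorial sketch only: simulate an index-template style merge of a top-level _meta section.
    public void exampleIndexTemplateMerge() throws IOException {
        MapperService mapperService = createMapperService(mapping(b -> {}));
        merge(mapperService, MapperService.MergeReason.INDEX_TEMPLATE,
            topMapping(b -> b.startObject("_meta").field("example", "value").endObject()));
        assertNotNull(mapperService.documentMapper().meta());
    }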
protected final XContentBuilder mapping(CheckedConsumer<XContentBuilder, IOException> buildFields) throws IOException {
@@ -167,6 +195,12 @@ public abstract class MapperServiceTestCase extends ESTestCase {
return builder.endObject().endObject().endObject();
}
protected final XContentBuilder dynamicMapping(Mapping dynamicMapping) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
dynamicMapping.toXContent(builder, ToXContent.EMPTY_PARAMS);
return builder.endObject();
}
protected final XContentBuilder fieldMapping(CheckedConsumer<XContentBuilder, IOException> buildField) throws IOException {
return mapping(b -> {
b.startObject("field");