Prohibit indexing a document with a parent for a type that doesn't have a `_parent` field configured, and prohibit adding a `_parent` field to an existing mapping.
Closes #3848 #3849
This commit is contained in:
parent
89de3ab627
commit
cc9ab111a0
|
@ -578,6 +578,14 @@ public class IndexRequest extends ShardReplicationOperationRequest<IndexRequest>
|
|||
if (mappingMd.routing().required() && routing == null) {
|
||||
throw new RoutingMissingException(index, type, id);
|
||||
}
|
||||
|
||||
if (parent != null && !mappingMd.hasParentField()) {
|
||||
throw new ElasticSearchIllegalArgumentException("Can't specify parent if no parent field has been configured");
|
||||
}
|
||||
} else {
|
||||
if (parent != null) {
|
||||
throw new ElasticSearchIllegalArgumentException("Can't specify parent if no parent field has been configured");
|
||||
}
|
||||
}
|
||||
|
||||
// generate id if not already provided and id generation is allowed
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.cluster.metadata;
|
||||
|
||||
import org.elasticsearch.ElasticSearchIllegalStateException;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.TimestampParsingException;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
@ -258,6 +259,7 @@ public class MappingMetaData {
|
|||
private Id id;
|
||||
private Routing routing;
|
||||
private Timestamp timestamp;
|
||||
private boolean hasParentField;
|
||||
|
||||
public MappingMetaData(DocumentMapper docMapper) {
|
||||
this.type = docMapper.type();
|
||||
|
@ -265,6 +267,7 @@ public class MappingMetaData {
|
|||
this.id = new Id(docMapper.idFieldMapper().path());
|
||||
this.routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path());
|
||||
this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), docMapper.timestampFieldMapper().dateTimeFormatter().format());
|
||||
this.hasParentField = docMapper.parentFieldMapper().active();
|
||||
}
|
||||
|
||||
public MappingMetaData(CompressedString mapping) throws IOException {
|
||||
|
@ -344,14 +347,20 @@ public class MappingMetaData {
|
|||
} else {
|
||||
this.timestamp = Timestamp.EMPTY;
|
||||
}
|
||||
if (withoutType.containsKey("_parent")) {
|
||||
this.hasParentField = true;
|
||||
} else {
|
||||
this.hasParentField = false;
|
||||
}
|
||||
}
|
||||
|
||||
public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp) {
|
||||
public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
|
||||
this.type = type;
|
||||
this.source = source;
|
||||
this.id = id;
|
||||
this.routing = routing;
|
||||
this.timestamp = timestamp;
|
||||
this.hasParentField = hasParentField;
|
||||
}
|
||||
|
||||
void updateDefaultMapping(MappingMetaData defaultMapping) {
|
||||
|
@ -374,6 +383,10 @@ public class MappingMetaData {
|
|||
return this.source;
|
||||
}
|
||||
|
||||
public boolean hasParentField() {
|
||||
return hasParentField;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the serialized compressed form of the mappings into a parsed map.
|
||||
*/
|
||||
|
@ -516,6 +529,9 @@ public class MappingMetaData {
|
|||
out.writeBoolean(false);
|
||||
}
|
||||
out.writeString(mappingMd.timestamp().format());
|
||||
if (out.getVersion().onOrAfter(Version.V_0_90_6)) {
|
||||
out.writeBoolean(mappingMd.hasParentField());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -553,7 +569,13 @@ public class MappingMetaData {
|
|||
Routing routing = new Routing(in.readBoolean(), in.readBoolean() ? in.readString() : null);
|
||||
// timestamp
|
||||
Timestamp timestamp = new Timestamp(in.readBoolean(), in.readBoolean() ? in.readString() : null, in.readString());
|
||||
return new MappingMetaData(type, source, id, routing, timestamp);
|
||||
final boolean hasParentField;
|
||||
if (in.getVersion().onOrAfter(Version.V_0_90_6)) {
|
||||
hasParentField = in.readBoolean();
|
||||
} else {
|
||||
hasParentField = true; // We assume here that the type has a parent field, which conforms to the behaviour of <= 0.90.5
|
||||
}
|
||||
return new MappingMetaData(type, source, id, routing, timestamp, hasParentField);
|
||||
}
|
||||
|
||||
public static class ParseContext {
|
||||
|
|
|
@ -123,7 +123,7 @@ public class SimpleIdCache extends AbstractIndexComponent implements IdCache, Se
|
|||
BytesRef spare = new BytesRef();
|
||||
for (String type : indexService.mapperService().types()) {
|
||||
ParentFieldMapper parentFieldMapper = indexService.mapperService().documentMapper(type).parentFieldMapper();
|
||||
if (parentFieldMapper != null) {
|
||||
if (parentFieldMapper.active()) {
|
||||
parentTypes.add(new HashedBytesArray(Strings.toUTF8Bytes(parentFieldMapper.type(), spare)));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -226,7 +226,7 @@ public class ShardGetService extends AbstractIndexShardComponent {
|
|||
Object value = null;
|
||||
if (field.equals(RoutingFieldMapper.NAME) && docMapper.routingFieldMapper().fieldType().stored()) {
|
||||
value = source.routing;
|
||||
} else if (field.equals(ParentFieldMapper.NAME) && docMapper.parentFieldMapper() != null && docMapper.parentFieldMapper().fieldType().stored()) {
|
||||
} else if (field.equals(ParentFieldMapper.NAME) && docMapper.parentFieldMapper().active() && docMapper.parentFieldMapper().fieldType().stored()) {
|
||||
value = source.parent;
|
||||
} else if (field.equals(TimestampFieldMapper.NAME) && docMapper.timestampFieldMapper().fieldType().stored()) {
|
||||
value = source.timestamp;
|
||||
|
|
|
@ -177,7 +177,7 @@ public class DocumentMapper implements ToXContent {
|
|||
this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper());
|
||||
this.rootMappers.put(TTLFieldMapper.class, new TTLFieldMapper());
|
||||
this.rootMappers.put(VersionFieldMapper.class, new VersionFieldMapper());
|
||||
// don't add parent field, by default its "null"
|
||||
this.rootMappers.put(ParentFieldMapper.class, new ParentFieldMapper());
|
||||
}
|
||||
|
||||
public Builder meta(ImmutableMap<String, Object> meta) {
|
||||
|
@ -306,7 +306,7 @@ public class DocumentMapper implements ToXContent {
|
|||
|
||||
this.typeFilter = typeMapper().termFilter(type, null);
|
||||
|
||||
if (rootMapper(ParentFieldMapper.class) != null) {
|
||||
if (rootMapper(ParentFieldMapper.class).active()) {
|
||||
// mark the routing field mapper as required
|
||||
rootMapper(RoutingFieldMapper.class).markAsRequired();
|
||||
}
|
||||
|
@ -631,8 +631,9 @@ public class DocumentMapper implements ToXContent {
|
|||
|
||||
public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
|
||||
MergeContext mergeContext = new MergeContext(this, mergeFlags);
|
||||
rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);
|
||||
assert rootMappers.size() == mergeWith.rootMappers.size();
|
||||
|
||||
rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);
|
||||
for (Map.Entry<Class<? extends RootMapper>, RootMapper> entry : rootMappers.entrySet()) {
|
||||
// root mappers included in root object will get merge in the rootObjectMapper
|
||||
if (entry.getValue().includeInObject()) {
|
||||
|
|
|
@ -130,6 +130,13 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
|
|||
this.typeAsBytes = new BytesRef(type);
|
||||
}
|
||||
|
||||
public ParentFieldMapper() {
|
||||
super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE),
|
||||
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, null, null, null);
|
||||
type = null;
|
||||
typeAsBytes = null;
|
||||
}
|
||||
|
||||
public String type() {
|
||||
return type;
|
||||
}
|
||||
|
@ -169,6 +176,10 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
|
|||
|
||||
@Override
|
||||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
if (!active()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (context.parser().currentName() != null && context.parser().currentName().equals(Defaults.NAME)) {
|
||||
// we are in the parsing of _parent phase
|
||||
String parentId = context.parser().text();
|
||||
|
@ -253,7 +264,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
|
|||
|
||||
List<String> types = new ArrayList<String>(context.mapperService().types().size());
|
||||
for (DocumentMapper documentMapper : context.mapperService()) {
|
||||
if (documentMapper.parentFieldMapper() == null) {
|
||||
if (!documentMapper.parentFieldMapper().active()) {
|
||||
types.add(documentMapper.type());
|
||||
}
|
||||
}
|
||||
|
@ -284,7 +295,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
|
|||
|
||||
List<String> types = new ArrayList<String>(context.mapperService().types().size());
|
||||
for (DocumentMapper documentMapper : context.mapperService()) {
|
||||
if (documentMapper.parentFieldMapper() == null) {
|
||||
if (!documentMapper.parentFieldMapper().active()) {
|
||||
types.add(documentMapper.type());
|
||||
}
|
||||
}
|
||||
|
@ -319,6 +330,10 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
|
|||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
if (!active()) {
|
||||
return builder;
|
||||
}
|
||||
|
||||
builder.startObject(CONTENT_TYPE);
|
||||
builder.field("type", type);
|
||||
builder.endObject();
|
||||
|
@ -327,6 +342,21 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
|
|||
|
||||
@Override
|
||||
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
|
||||
// do nothing here, no merging, but also no exception
|
||||
ParentFieldMapper other = (ParentFieldMapper) mergeWith;
|
||||
if (active() == other.active()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (active() != other.active() || !type.equals(other.type)) {
|
||||
mergeContext.addConflict("The _parent field can't be added or updated");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return Whether the _parent field is actually used.
|
||||
*/
|
||||
public boolean active() {
|
||||
return type != null;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -121,7 +121,7 @@ public class HasChildFilterParser implements FilterParser {
|
|||
if (childDocMapper == null) {
|
||||
throw new QueryParsingException(parseContext.index(), "No mapping for for type [" + childType + "]");
|
||||
}
|
||||
if (childDocMapper.parentFieldMapper() == null) {
|
||||
if (!childDocMapper.parentFieldMapper().active()) {
|
||||
throw new QueryParsingException(parseContext.index(), "Type [" + childType + "] does not have parent mapping");
|
||||
}
|
||||
String parentType = childDocMapper.parentFieldMapper().type();
|
||||
|
|
|
@ -123,7 +123,7 @@ public class HasChildQueryParser implements QueryParser {
|
|||
if (childDocMapper == null) {
|
||||
throw new QueryParsingException(parseContext.index(), "[has_child] No mapping for for type [" + childType + "]");
|
||||
}
|
||||
if (childDocMapper.parentFieldMapper() == null) {
|
||||
if (!childDocMapper.parentFieldMapper().active()) {
|
||||
throw new QueryParsingException(parseContext.index(), "[has_child] Type [" + childType + "] does not have parent mapping");
|
||||
}
|
||||
String parentType = childDocMapper.parentFieldMapper().type();
|
||||
|
|
|
@ -135,7 +135,7 @@ public class HasParentFilterParser implements FilterParser {
|
|||
parentTypes.add(parentType);
|
||||
for (DocumentMapper documentMapper : parseContext.mapperService()) {
|
||||
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
|
||||
if (parentFieldMapper != null) {
|
||||
if (parentFieldMapper.active()) {
|
||||
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
|
||||
if (parentTypeDocumentMapper == null) {
|
||||
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
|
||||
|
|
|
@ -134,7 +134,7 @@ public class HasParentQueryParser implements QueryParser {
|
|||
parentTypes.add(parentType);
|
||||
for (DocumentMapper documentMapper : parseContext.mapperService()) {
|
||||
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
|
||||
if (parentFieldMapper != null) {
|
||||
if (parentFieldMapper.active()) {
|
||||
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
|
||||
if (parentTypeDocumentMapper == null) {
|
||||
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
|
||||
|
|
|
@ -120,7 +120,7 @@ public class TopChildrenQueryParser implements QueryParser {
|
|||
if (childDocMapper == null) {
|
||||
throw new QueryParsingException(parseContext.index(), "No mapping for for type [" + childType + "]");
|
||||
}
|
||||
if (childDocMapper.parentFieldMapper() == null) {
|
||||
if (!childDocMapper.parentFieldMapper().active()) {
|
||||
throw new QueryParsingException(parseContext.index(), "Type [" + childType + "] does not have parent mapping");
|
||||
}
|
||||
String parentType = childDocMapper.parentFieldMapper().type();
|
||||
|
|
|
@ -26,7 +26,6 @@ import org.elasticsearch.test.ElasticsearchTestCase;
|
|||
import org.junit.Test;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
|
@ -37,7 +36,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1");
|
||||
|
@ -55,7 +54,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startArray("id").value("id").endArray().field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, "routing_value", "1");
|
||||
|
@ -82,7 +81,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext("id", null, "1");
|
||||
|
@ -100,7 +99,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext("id", "routing_value1", null);
|
||||
|
@ -118,7 +117,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "routing"),
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject().bytes().toBytes();
|
||||
MappingMetaData.ParseContext parseContext = md.createParseContext(null, null, null);
|
||||
|
@ -133,7 +132,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").endObject()
|
||||
|
@ -151,7 +150,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").endObject()
|
||||
|
@ -172,7 +171,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").endObject()
|
||||
|
@ -193,7 +192,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "obj2.timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("routing", "routing_value").endObject()
|
||||
|
@ -214,7 +213,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").field("routing", "routing_value").field("timestamp", "1").endObject()
|
||||
|
@ -232,7 +231,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("obj1.obj0.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.obj2.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "obj1.obj3.timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1")
|
||||
|
@ -261,7 +260,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("obj1.id"),
|
||||
new MappingMetaData.Routing(true, "obj1.routing"),
|
||||
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "obj1.timestamp", "dateOptionalTime"), false);
|
||||
byte[] bytes = jsonBuilder().startObject().field("field1", "value1").field("field2", "value2")
|
||||
.startObject("obj0").field("field1", "value1").field("field2", "value2").endObject()
|
||||
.startObject("obj1").field("id", "id").endObject()
|
||||
|
@ -281,7 +280,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("field1"),
|
||||
new MappingMetaData.Routing(true, "field1.field1"),
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime"), false);
|
||||
|
||||
byte[] bytes = jsonBuilder().startObject()
|
||||
.field("aaa", "wr")
|
||||
|
@ -304,7 +303,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id("id"),
|
||||
new MappingMetaData.Routing(true, "field1.field1.field2"),
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime"), false);
|
||||
|
||||
byte[] bytes = jsonBuilder().startObject()
|
||||
.field("aaa", "wr")
|
||||
|
@ -327,7 +326,7 @@ public class MappingMetaDataParserTests extends ElasticsearchTestCase {
|
|||
MappingMetaData md = new MappingMetaData("type1", new CompressedString(""),
|
||||
new MappingMetaData.Id(null),
|
||||
new MappingMetaData.Routing(true, "field1.field2"),
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime"));
|
||||
new MappingMetaData.Timestamp(true, "field1", "dateOptionalTime"), false);
|
||||
|
||||
byte[] bytes = jsonBuilder().startObject()
|
||||
.field("aaa", "wr")
|
||||
|
|
|
@ -24,8 +24,8 @@ import org.elasticsearch.index.mapper.*;
|
|||
import org.elasticsearch.test.ElasticsearchTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
/**
|
||||
*
|
||||
|
@ -46,8 +46,8 @@ public class ParentMappingTests extends ElasticsearchTestCase {
|
|||
.bytes()).type("type").id("1"));
|
||||
|
||||
// no _parent mapping, used as a simple field
|
||||
assertThat(doc.parent(), equalTo(null));
|
||||
assertThat(doc.rootDoc().get("_parent"), equalTo("1122"));
|
||||
assertThat(doc.parent(), nullValue());
|
||||
assertThat(doc.rootDoc().get("_parent"), nullValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -20,6 +20,9 @@
|
|||
package org.elasticsearch.search.child;
|
||||
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
|
||||
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
|
||||
import org.elasticsearch.action.count.CountResponse;
|
||||
import org.elasticsearch.action.explain.ExplainResponse;
|
||||
|
@ -30,6 +33,7 @@ import org.elasticsearch.action.search.ShardSearchFailure;
|
|||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.lucene.search.function.CombineFunction;
|
||||
import org.elasticsearch.common.settings.ImmutableSettings;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.search.facet.terms.TermsFacet;
|
||||
|
@ -1832,4 +1836,65 @@ public class SimpleChildQuerySearchTests extends AbstractIntegrationTest {
|
|||
assertHitCount(searchResponse, 0l);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void indexChildDocWithNoParentMapping() throws ElasticSearchException, IOException {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.setSettings(
|
||||
ImmutableSettings.settingsBuilder()
|
||||
.put("index.number_of_shards", 1)
|
||||
.put("index.number_of_replicas", 0)
|
||||
).execute().actionGet();
|
||||
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
|
||||
client().admin().indices().preparePutMapping("test").setType("child1").setSource(
|
||||
jsonBuilder().startObject().startObject("type").endObject().endObject()
|
||||
).execute().actionGet();
|
||||
|
||||
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1", "_parent", "bla").execute().actionGet();
|
||||
try {
|
||||
client().prepareIndex("test", "child1", "c1").setParent("p1").setSource("c_field", "blue").execute().actionGet();
|
||||
fail();
|
||||
} catch (ElasticSearchIllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), equalTo("Can't specify parent if no parent field has been configured"));
|
||||
}
|
||||
try {
|
||||
client().prepareIndex("test", "child2", "c2").setParent("p1").setSource("c_field", "blue").execute().actionGet();
|
||||
fail();
|
||||
} catch (ElasticSearchIllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), equalTo("Can't specify parent if no parent field has been configured"));
|
||||
}
|
||||
|
||||
refresh();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAddingParentToExistingMapping() throws ElasticSearchException, IOException {
|
||||
client().admin().indices().prepareCreate("test")
|
||||
.setSettings(
|
||||
ImmutableSettings.settingsBuilder()
|
||||
.put("index.number_of_shards", 1)
|
||||
.put("index.number_of_replicas", 0)
|
||||
).execute().actionGet();
|
||||
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
|
||||
|
||||
PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping("test").setType("child").setSource("number", "type=integer")
|
||||
.execute().actionGet();
|
||||
assertThat(putMappingResponse.isAcknowledged(), equalTo(true));
|
||||
|
||||
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").execute().actionGet();
|
||||
Map<String, Object> mapping = getMappingsResponse.getMappings().get("test").get("child").getSourceAsMap();
|
||||
assertThat(mapping.size(), equalTo(1));
|
||||
assertThat(mapping.get("properties"), notNullValue());
|
||||
|
||||
try {
|
||||
// Adding _parent metadata field to existing mapping is prohibited:
|
||||
client().admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_parent").field("type", "parent").endObject()
|
||||
.endObject().endObject()).execute().actionGet();
|
||||
fail();
|
||||
} catch (MergeMappingException e) {
|
||||
assertThat(e.getMessage(), equalTo("Merge failed with failures {[The _parent field can't be added or updated]}"));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue