Introduce ParentJoinFieldMapper, a field mapper that creates a parent/child relation between documents of the same index ()

* Introduce ParentJoinFieldMapper, a field mapper that creates a parent/child relation between documents of the same index

This change adds a new field mapper named ParentJoinFieldMapper. This mapper is a replacement for the ParentFieldMapper: instead of relying on the types defined in the mapping,
it uses an internal field to materialize the parent/child relation within a single index.
This change also adds a fetch sub-phase that automatically retrieves the join name (the parent or child name) and, for child documents, the parent id, and returns them in the response hit fields.
Compatibility with the `has_parent` and `has_child` queries and the `children` aggregation will be added in a follow-up.
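
A quick sketch of the intended usage (the index, type, and field names mirror the REST test added below; the console-style requests are illustrative, not part of this change): declare the relations on a join field in the mapping, index parents normally, index children with the name of their relation and the id of their parent (routed to that parent), and the fetch sub-phase surfaces the join name and parent id as hit fields.

# create the index with a join field (parent -> child -> grand_child)
PUT test
{ "mappings": { "doc": { "properties": {
    "join_field": { "type": "join", "parent": "child", "child": "grand_child" } } } } }

# index a parent document
PUT test/doc/1
{ "join_field": { "name": "parent" } }

# index a child document; it names its parent and must be routed to it
PUT test/doc/2?routing=1
{ "join_field": { "name": "child", "parent": "1" } }

# a search hit for document 2 then exposes the join information as hit fields:
#   "join_field"        : ["child"]
#   "join_field#parent" : ["1"]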

Relates 
Jim Ferenczi 2017-05-31 18:07:21 +02:00 committed by GitHub
parent 90a5574c93
commit b5d62ae747
8 changed files with 1276 additions and 1 deletion

@@ -20,18 +20,24 @@
package org.elasticsearch.join;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder;
import org.elasticsearch.join.aggregations.InternalChildren;
import org.elasticsearch.join.fetch.ParentJoinFieldSubFetchPhase;
import org.elasticsearch.join.mapper.ParentJoinFieldMapper;
import org.elasticsearch.join.query.HasChildQueryBuilder;
import org.elasticsearch.join.query.HasParentQueryBuilder;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.fetch.FetchSubPhase;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class ParentJoinPlugin extends Plugin implements SearchPlugin {
public class ParentJoinPlugin extends Plugin implements SearchPlugin, MapperPlugin {
public ParentJoinPlugin(Settings settings) {}
@Override
@@ -50,5 +56,13 @@ public class ParentJoinPlugin extends Plugin implements SearchPlugin {
);
}
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(ParentJoinFieldMapper.CONTENT_TYPE, new ParentJoinFieldMapper.TypeParser());
}
@Override
public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return Collections.singletonList(new ParentJoinFieldSubFetchPhase());
}
}

@@ -0,0 +1,102 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.fetch;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.join.mapper.ParentJoinFieldMapper;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* A sub fetch phase that retrieves the join name and the parent id for each document containing
* a {@link ParentJoinFieldMapper} field.
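* <p>
* For example (values taken from the tests added in this change), a child document indexed with
* join name "child" and parent id "1" gets two hit fields, "join_field" with value ["child"] and
* "join_field#parent" with value ["1"]; a parent document only gets the "join_field" hit field.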
*/
public final class ParentJoinFieldSubFetchPhase implements FetchSubPhase {
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
if (context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false) {
return;
}
if (context.mapperService().getIndexSettings().getIndexVersionCreated().before(Version.V_6_0_0_alpha2)) {
return;
}
DocumentMapper docMapper = context.mapperService().documentMapper(hitContext.hit().getType());
Tuple<String, String> joinField = null;
Tuple<String, String> parentField = null;
for (FieldMapper fieldMapper : docMapper.mappers()) {
if (fieldMapper instanceof ParentJoinFieldMapper) {
String joinName = getSortedDocValue(fieldMapper.name(), hitContext.reader(), hitContext.docId());
if (joinName != null) {
ParentJoinFieldMapper joinFieldMapper = (ParentJoinFieldMapper) fieldMapper;
joinField = new Tuple<>(fieldMapper.name(), joinName);
// we retrieve the parent id only for children.
FieldMapper parentMapper = joinFieldMapper.getParentIdFieldMapper(joinName, false);
if (parentMapper != null) {
String parent = getSortedDocValue(parentMapper.name(), hitContext.reader(), hitContext.docId());
parentField = new Tuple<>(parentMapper.name(), parent);
}
break;
}
}
}
if (joinField == null) {
// hit has no join field.
return;
}
Map<String, SearchHitField> fields = hitContext.hit().fieldsOrNull();
if (fields == null) {
fields = new HashMap<>();
hitContext.hit().fields(fields);
}
fields.put(joinField.v1(), new SearchHitField(joinField.v1(), Collections.singletonList(joinField.v2())));
if (parentField != null) {
fields.put(parentField.v1(), new SearchHitField(parentField.v1(), Collections.singletonList(parentField.v2())));
}
}
private String getSortedDocValue(String field, LeafReader reader, int docId) {
try {
SortedDocValues docValues = reader.getSortedDocValues(field);
if (docValues == null || docValues.advanceExact(docId) == false) {
return null;
}
int ord = docValues.ordValue();
BytesRef joinName = docValues.lookupOrd(ord);
return joinName.utf8ToString();
} catch (IOException e) {
throw ExceptionsHelper.convertToElastic(e);
}
}
}

@@ -0,0 +1,174 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.StringFieldType;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
* A field mapper used internally by the {@link ParentJoinFieldMapper} to index
* the value that links documents in the index (the parent _id, or the document's own _id if the document is a parent).
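* The field is named after the join field and the parent name, e.g. "join_field#parent"
* (see {@link ParentJoinFieldMapper#getParentIdFieldName}).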
*/
public final class ParentIdFieldMapper extends FieldMapper {
static final String CONTENT_TYPE = "parent";
static class Defaults {
public static final MappedFieldType FIELD_TYPE = new ParentIdFieldType();
static {
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setHasDocValues(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.freeze();
}
}
static class Builder extends FieldMapper.Builder<Builder, ParentIdFieldMapper> {
private final String parent;
private final Set<String> children;
Builder(String name, String parent, Set<String> children) {
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
builder = this;
this.parent = parent;
this.children = children;
}
public Set<String> getChildren() {
return children;
}
@Override
public ParentIdFieldMapper build(BuilderContext context) {
fieldType.setName(name);
return new ParentIdFieldMapper(name, parent, children, fieldType, context.indexSettings());
}
}
public static final class ParentIdFieldType extends StringFieldType {
public ParentIdFieldType() {
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
}
protected ParentIdFieldType(ParentIdFieldType ref) {
super(ref);
}
public ParentIdFieldType clone() {
return new ParentIdFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder();
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
return null;
}
BytesRef binaryValue = (BytesRef) value;
return binaryValue.utf8ToString();
}
}
private final String parentName;
private Set<String> children;
protected ParentIdFieldMapper(String simpleName,
String parentName,
Set<String> children,
MappedFieldType fieldType,
Settings indexSettings) {
super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, MultiFields.empty(), null);
this.parentName = parentName;
this.children = children;
}
@Override
protected ParentIdFieldMapper clone() {
return (ParentIdFieldMapper) super.clone();
}
/**
* Returns the parent name associated with this mapper.
*/
public String getParentName() {
return parentName;
}
/**
* Returns the children names associated with this mapper.
*/
public Collection<String> getChildren() {
return children;
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
if (context.externalValueSet() == false) {
throw new IllegalStateException("external value not set");
}
String refId = (String) context.externalValue();
BytesRef binaryValue = new BytesRef(refId);
Field field = new Field(fieldType().name(), binaryValue, fieldType());
fields.add(field);
fields.add(new SortedDocValuesField(fieldType().name(), binaryValue));
}
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);
ParentIdFieldMapper parentMergeWith = (ParentIdFieldMapper) mergeWith;
this.children = parentMergeWith.children;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
}

@@ -0,0 +1,408 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.StringFieldType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* A {@link FieldMapper} that creates hierarchical joins (parent-join) between documents in the same index.
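* <p>
* An illustrative mapping definition, with field and relation names taken from the tests in this change:
* "join_field": { "type": "join", "parent": "child", "child": "grand_child" }
* Parent documents declare only the name of their relation under the field; child documents also
* declare the _id of their parent and must be routed to it.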
* TODO Should be restricted to a single join field per index
*/
public final class ParentJoinFieldMapper extends FieldMapper {
public static final String NAME = "join";
public static final String CONTENT_TYPE = "join";
public static class Defaults {
public static final MappedFieldType FIELD_TYPE = new JoinFieldType();
static {
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setHasDocValues(true);
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.freeze();
}
}
static String getParentIdFieldName(String joinFieldName, String parentName) {
return joinFieldName + "#" + parentName;
}
static void checkPreConditions(Version indexCreatedVersion, ContentPath path, String name) {
if (indexCreatedVersion.before(Version.V_6_0_0_alpha2)) {
throw new IllegalStateException("unable to create join field [" + name +
"] for index created before " + Version.V_6_0_0_alpha2);
}
if (path.pathAsText(name).contains(".")) {
throw new IllegalArgumentException("join field [" + path.pathAsText(name) + "] " +
"cannot be added inside an object or in a multi-field");
}
}
static void checkParentFields(String name, List<ParentIdFieldMapper> mappers) {
Set<String> children = new HashSet<>();
List<String> conflicts = new ArrayList<>();
for (ParentIdFieldMapper mapper : mappers) {
for (String child : mapper.getChildren()) {
if (children.add(child) == false) {
conflicts.add("[" + child + "] cannot have multiple parents");
}
}
}
if (conflicts.isEmpty() == false) {
throw new IllegalArgumentException("invalid definition for join field [" + name + "]:\n" + conflicts.toString());
}
}
static void checkDuplicateJoinFields(ParseContext.Document doc) {
if (doc.getFields().stream().anyMatch((m) -> m.fieldType() instanceof JoinFieldType)) {
throw new IllegalStateException("cannot have two join fields in the same document");
}
}
public static class Builder extends FieldMapper.Builder<Builder, ParentJoinFieldMapper> {
final List<ParentIdFieldMapper.Builder> parentIdFieldBuilders = new ArrayList<>();
public Builder(String name) {
super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
builder = this;
}
@Override
public JoinFieldType fieldType() {
return (JoinFieldType) super.fieldType();
}
public Builder addParent(String parent, Set<String> children) {
String parentIdFieldName = getParentIdFieldName(name, parent);
parentIdFieldBuilders.add(new ParentIdFieldMapper.Builder(parentIdFieldName, parent, children));
return this;
}
@Override
public ParentJoinFieldMapper build(BuilderContext context) {
checkPreConditions(context.indexCreatedVersion(), context.path(), name);
fieldType.setName(name);
final List<ParentIdFieldMapper> parentIdFields = new ArrayList<>();
parentIdFieldBuilders.stream().map((e) -> e.build(context)).forEach(parentIdFields::add);
checkParentFields(name(), parentIdFields);
return new ParentJoinFieldMapper(name, fieldType, context.indexSettings(),
Collections.unmodifiableList(parentIdFields));
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
final IndexSettings indexSettings = parserContext.mapperService().getIndexSettings();
if (indexSettings.getIndexMetaData().isRoutingPartitionedIndex()) {
throw new IllegalStateException("cannot set join field [" + name + "] for the partitioned index " +
"[" + indexSettings.getIndex().getName() + "]");
}
Builder builder = new Builder(name);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
if ("type".equals(entry.getKey())) {
continue;
}
final String parent = entry.getKey();
Set<String> children;
if (entry.getValue() instanceof List) {
children = new HashSet<>();
for (Object childObj : (List) entry.getValue()) {
if (childObj instanceof String) {
children.add(childObj.toString());
} else {
throw new MapperParsingException("[" + parent + "] expected an array of strings but was:" +
childObj.getClass().getSimpleName());
}
}
children = Collections.unmodifiableSet(children);
} else if (entry.getValue() instanceof String) {
children = Collections.singleton(entry.getValue().toString());
} else {
throw new MapperParsingException("[" + parent + "] expected string but was:" +
entry.getValue().getClass().getSimpleName());
}
builder.addParent(parent, children);
iterator.remove();
}
return builder;
}
}
public static final class JoinFieldType extends StringFieldType {
public JoinFieldType() {
setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
}
protected JoinFieldType(JoinFieldType ref) {
super(ref);
}
public JoinFieldType clone() {
return new JoinFieldType(this);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder();
}
@Override
public Object valueForDisplay(Object value) {
if (value == null) {
return null;
}
BytesRef binaryValue = (BytesRef) value;
return binaryValue.utf8ToString();
}
}
private List<ParentIdFieldMapper> parentIdFields;
protected ParentJoinFieldMapper(String simpleName,
MappedFieldType fieldType,
Settings indexSettings,
List<ParentIdFieldMapper> parentIdFields) {
super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, MultiFields.empty(), null);
this.parentIdFields = parentIdFields;
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected ParentJoinFieldMapper clone() {
return (ParentJoinFieldMapper) super.clone();
}
@Override
public JoinFieldType fieldType() {
return (JoinFieldType) super.fieldType();
}
@Override
public Iterator<Mapper> iterator() {
return parentIdFields.stream().map((field) -> (Mapper) field).iterator();
}
/**
* Returns true if <code>name</code> is a parent name in the field.
*/
public boolean hasParent(String name) {
return parentIdFields.stream().anyMatch((mapper) -> name.equals(mapper.getParentName()));
}
/**
* Returns true if <code>name</code> is a child name in the field.
*/
public boolean hasChild(String name) {
return parentIdFields.stream().anyMatch((mapper) -> mapper.getChildren().contains(name));
}
/**
* Returns the parent-id field mapper associated with <code>name</code>: <code>name</code> is treated
* as a parent name if <code>isParent</code> is true, and as a child name otherwise.
*/
public ParentIdFieldMapper getParentIdFieldMapper(String name, boolean isParent) {
for (ParentIdFieldMapper mapper : parentIdFields) {
if (isParent && name.equals(mapper.getParentName())) {
return mapper;
} else if (isParent == false && mapper.getChildren().contains(name)) {
return mapper;
}
}
return null;
}
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);
ParentJoinFieldMapper joinMergeWith = (ParentJoinFieldMapper) mergeWith;
List<String> conflicts = new ArrayList<>();
for (ParentIdFieldMapper mapper : parentIdFields) {
if (joinMergeWith.getParentIdFieldMapper(mapper.getParentName(), true) == null) {
conflicts.add("cannot remove parent [" + mapper.getParentName() + "] in join field [" + name() + "]");
}
}
final List<ParentIdFieldMapper> newParentIdFields = new ArrayList<>();
for (ParentIdFieldMapper mergeWithMapper : joinMergeWith.parentIdFields) {
ParentIdFieldMapper self = getParentIdFieldMapper(mergeWithMapper.getParentName(), true);
if (self == null) {
if (getParentIdFieldMapper(mergeWithMapper.getParentName(), false) != null) {
// it is forbidden to add a parent to an existing child
conflicts.add("cannot create parent [" + mergeWithMapper.getParentName() + "] from an existing child");
}
for (String child : mergeWithMapper.getChildren()) {
if (getParentIdFieldMapper(child, true) != null) {
// it is forbidden to add a child to an existing parent
conflicts.add("cannot create child [" + child + "] from an existing parent");
}
}
newParentIdFields.add(mergeWithMapper);
} else {
for (String child : self.getChildren()) {
if (mergeWithMapper.getChildren().contains(child) == false) {
conflicts.add("cannot remove child [" + child + "] in join field [" + name() + "]");
}
}
ParentIdFieldMapper merged = (ParentIdFieldMapper) self.merge(mergeWithMapper, false);
newParentIdFields.add(merged);
}
}
if (conflicts.isEmpty() == false) {
throw new IllegalStateException("invalid update for join field [" + name() + "]:\n" + conflicts.toString());
}
this.parentIdFields = Collections.unmodifiableList(newParentIdFields);
}
@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
ParentJoinFieldMapper fieldMapper = (ParentJoinFieldMapper) super.updateFieldType(fullNameToFieldType);
final List<ParentIdFieldMapper> newMappers = new ArrayList<> ();
for (ParentIdFieldMapper mapper : fieldMapper.parentIdFields) {
newMappers.add((ParentIdFieldMapper) mapper.updateFieldType(fullNameToFieldType));
}
fieldMapper.parentIdFields = Collections.unmodifiableList(newMappers);
return fieldMapper;
}
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
throw new UnsupportedOperationException("parsing is implemented in parse(), this method should NEVER be called");
}
@Override
public Mapper parse(ParseContext context) throws IOException {
// Only one join field per document
checkDuplicateJoinFields(context.doc());
context.path().add(simpleName());
XContentParser.Token token = context.parser().currentToken();
String name = null;
String parent = null;
if (token == XContentParser.Token.START_OBJECT) {
String currentFieldName = null;
while ((token = context.parser().nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = context.parser().currentName();
} else if (token == XContentParser.Token.VALUE_STRING) {
if ("name".equals(currentFieldName)) {
name = context.parser().text();
} else if ("parent".equals(currentFieldName)) {
parent = context.parser().text();
} else {
throw new IllegalArgumentException("unknown field name [" + currentFieldName + "] in join field [" + name() + "]");
}
}
}
} else if (token == XContentParser.Token.VALUE_STRING) {
name = context.parser().text();
parent = null;
} else {
throw new IllegalStateException("[" + name + "] expected START_OBJECT or VALUE_STRING but was: " + token);
}
ParentIdFieldMapper parentIdField = getParentIdFieldMapper(name, true);
ParentIdFieldMapper childParentIdField = getParentIdFieldMapper(name, false);
if (parentIdField == null && childParentIdField == null) {
throw new IllegalArgumentException("unknown join name [" + name + "] for field [" + name() + "]");
}
if (childParentIdField != null) {
// Index the document as a child
if (parent == null) {
throw new IllegalArgumentException("[parent] is missing for join field [" + name() + "]");
}
if (context.sourceToParse().routing() == null) {
throw new IllegalArgumentException("[routing] is missing for join field [" + name() + "]");
}
assert childParentIdField.getChildren().contains(name);
ParseContext externalContext = context.createExternalValueContext(parent);
childParentIdField.parse(externalContext);
}
if (parentIdField != null) {
// Index the document as a parent
assert parentIdField.getParentName().equals(name);
ParseContext externalContext = context.createExternalValueContext(context.sourceToParse().id());
parentIdField.parse(externalContext);
}
BytesRef binaryValue = new BytesRef(name);
Field field = new Field(fieldType().name(), binaryValue, fieldType());
context.doc().add(field);
context.doc().add(new SortedDocValuesField(fieldType().name(), binaryValue));
context.path().remove();
return null;
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
for (ParentIdFieldMapper field : parentIdFields) {
if (field.getChildren().size() == 1) {
builder.field(field.getParentName(), field.getChildren().iterator().next());
} else {
builder.field(field.getParentName(), field.getChildren());
}
}
}
}

@@ -0,0 +1,159 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.fetch;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class ParentJoinFieldSubFetchPhaseTests extends ESSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(ParentJoinPlugin.class);
}
public void testSingleParentJoinField() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.field("child", "grand_child")
.field("product", "item")
.endObject()
.endObject()
.endObject().string();
IndexService service = createIndex("test", Settings.EMPTY);
service.mapperService().merge("doc", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE, true);
// empty document
client().prepareIndex("test", "doc", "0")
.setSource().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
// parent document
client().prepareIndex("test", "doc", "1")
.setSource("join_field", Collections.singletonMap("name", "parent"))
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
// child document
Map<String, String> joinField = new HashMap<>();
joinField.put("name", "child");
joinField.put("parent", "1");
client().prepareIndex("test", "doc", "2")
.setSource("join_field", joinField).setRouting("1")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
// grand_child document
joinField.clear();
joinField.put("name", "grand_child");
joinField.put("parent", "2");
client().prepareIndex("test", "doc", "3")
.setSource("join_field", joinField).setRouting("2")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
// product document
client().prepareIndex("test", "doc", "4")
.setSource("join_field", Collections.singletonMap("name", "product"))
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
// item document
joinField.clear();
joinField.put("name", "item");
joinField.put("parent", "4");
client().prepareIndex("test", "doc", "5")
.setSource("join_field", joinField).setRouting("4").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
SearchResponse response = client().prepareSearch("test")
.setQuery(QueryBuilders.termQuery("join_field", "parent"))
.get();
assertThat(response.getHits().totalHits, equalTo(1L));
assertThat(response.getHits().getHits().length, equalTo(1));
assertThat(response.getHits().getHits()[0].field("join_field").getValue(), equalTo("parent"));
assertNull(response.getHits().getHits()[0].field("join_field#parent"));
response = client().prepareSearch("test")
.setQuery(QueryBuilders.termQuery("join_field", "child"))
.get();
assertThat(response.getHits().totalHits, equalTo(1L));
assertThat(response.getHits().getHits().length, equalTo(1));
assertThat(response.getHits().getHits()[0].field("join_field").getValue(), equalTo("child"));
assertThat(response.getHits().getHits()[0].field("join_field#parent").getValue(), equalTo("1"));
assertNull(response.getHits().getHits()[0].field("join_field#child"));
response = client().prepareSearch("test")
.setQuery(QueryBuilders.termQuery("join_field", "grand_child"))
.get();
assertThat(response.getHits().totalHits, equalTo(1L));
assertThat(response.getHits().getHits().length, equalTo(1));
assertThat(response.getHits().getHits()[0].field("join_field").getValue(), equalTo("grand_child"));
assertThat(response.getHits().getHits()[0].field("join_field#child").getValue(), equalTo("2"));
response = client().prepareSearch("test")
.setQuery(QueryBuilders.termQuery("join_field", "product"))
.get();
assertThat(response.getHits().totalHits, equalTo(1L));
assertThat(response.getHits().getHits().length, equalTo(1));
assertThat(response.getHits().getHits()[0].field("join_field").getValue(), equalTo("product"));
assertNull(response.getHits().getHits()[0].field("join_field#product"));
response = client().prepareSearch("test")
.setQuery(QueryBuilders.termQuery("join_field", "item"))
.get();
assertThat(response.getHits().totalHits, equalTo(1L));
assertThat(response.getHits().getHits().length, equalTo(1));
assertThat(response.getHits().getHits()[0].field("join_field").getValue(), equalTo("item"));
assertThat(response.getHits().getHits()[0].field("join_field#product").getValue(), equalTo("4"));
response = client().prepareSearch("test")
.addSort(SortBuilders.fieldSort("join_field"))
.get();
assertThat(response.getHits().totalHits, equalTo(6L));
assertThat(response.getHits().getHits().length, equalTo(6));
assertThat(response.getHits().getHits()[0].field("join_field").getValue(), equalTo("child"));
assertThat(response.getHits().getHits()[0].field("join_field#parent").getValue(), equalTo("1"));
assertNull(response.getHits().getHits()[0].field("join_field#child"));
assertThat(response.getHits().getHits()[1].field("join_field").getValue(), equalTo("grand_child"));
assertThat(response.getHits().getHits()[1].field("join_field#child").getValue(), equalTo("2"));
assertThat(response.getHits().getHits()[2].field("join_field").getValue(), equalTo("item"));
assertThat(response.getHits().getHits()[2].field("join_field#product").getValue(), equalTo("4"));
assertThat(response.getHits().getHits()[3].field("join_field").getValue(), equalTo("parent"));
assertNull(response.getHits().getHits()[3].field("join_field#parent"));
assertThat(response.getHits().getHits()[4].field("join_field").getValue(), equalTo("product"));
assertNull(response.getHits().getHits()[4].field("join_field#product"));
assertNull(response.getHits().getHits()[5].field("join_field"));
}
}

@@ -0,0 +1,353 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.mapper;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperException;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.Collection;
import java.util.Collections;
import static org.hamcrest.Matchers.containsString;
public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(ParentJoinPlugin.class);
}
public void testSingleLevel() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.endObject()
.endObject()
.endObject().string();
DocumentMapper docMapper = createIndex("test")
.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
// Doc without join
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON));
assertNull(doc.rootDoc().getBinaryValue("join_field"));
// Doc parent
doc = docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject()
.field("join_field", "parent")
.endObject().bytes(), XContentType.JSON));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Doc child
doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", "1")
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Unknown join name
MapperException exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject()
.field("join_field", "unknown")
.endObject().bytes(), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
}
public void testMultipleLevels() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.field("child", "grand_child")
.endObject()
.endObject()
.endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping));
// Doc without join
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON));
assertNull(doc.rootDoc().getBinaryValue("join_field"));
// Doc parent
doc = docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder()
.startObject()
.field("join_field", "parent")
.endObject().bytes(), XContentType.JSON));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Doc child
doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", "1")
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Doc child missing parent
MapperException exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
.field("join_field", "child")
.endObject().bytes(), XContentType.JSON).routing("1")));
assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]"));
// Doc child missing routing
exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "child")
.field("parent", "1")
.endObject()
.endObject().bytes(), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]"));
// Doc grand_child
doc = docMapper.parse(SourceToParse.source("test", "type", "3",
XContentFactory.jsonBuilder().startObject()
.startObject("join_field")
.field("name", "grand_child")
.field("parent", "2")
.endObject()
.endObject().bytes(), XContentType.JSON).routing("1"));
assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Unknown join name
exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject()
.field("join_field", "unknown")
.endObject().bytes(), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
}
public void testUpdateRelations() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject().endObject().string();
IndexService indexService = createIndex("test");
indexService.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE, false);
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject().endObject().string();
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(exc.getMessage(), containsString("cannot remove parent [parent] in join field [join_field]"));
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.field("child", "grand_child1")
.endObject()
.endObject().endObject().string();
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(exc.getMessage(), containsString("cannot remove child [grand_child2] in join field [join_field]"));
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("uber_parent", "parent")
.field("parent", "child")
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject().endObject().string();
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(exc.getMessage(), containsString("cannot create child [parent] from an existing parent"));
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.array("child", "grand_child1", "grand_child2")
.field("grand_child2", "grand_grand_child")
.endObject()
.endObject().endObject().string();
IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(exc.getMessage(), containsString("cannot create parent [grand_child2] from an existing child]"));
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.array("parent", "child", "child2")
.array("child", "grand_child1", "grand_child2")
.endObject()
.endObject().endObject().string();
indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE, true);
ParentJoinFieldMapper mapper = (ParentJoinFieldMapper) indexService.mapperService()
.docMappers(false).iterator().next().mappers().getMapper("join_field");
assertTrue(mapper.hasChild("child2"));
assertFalse(mapper.hasParent("child2"));
assertTrue(mapper.hasChild("grand_child2"));
assertFalse(mapper.hasParent("grand_child2"));
}
{
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
.field("type", "join")
.array("parent", "child", "child2")
.array("child", "grand_child1", "grand_child2")
.array("other", "child_other1", "child_other2")
.endObject()
.endObject().endObject().string();
indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE, true);
ParentJoinFieldMapper mapper = (ParentJoinFieldMapper) indexService.mapperService()
.docMappers(false).iterator().next().mappers().getMapper("join_field");
assertTrue(mapper.hasParent("other"));
assertFalse(mapper.hasChild("other"));
assertTrue(mapper.hasChild("child_other1"));
assertFalse(mapper.hasParent("child_other1"));
assertTrue(mapper.hasChild("child_other2"));
assertFalse(mapper.hasParent("child_other2"));
}
}
public void testInvalidJoinFieldInsideObject() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("object")
.startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(MapperParsingException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(exc.getRootCause().getMessage(),
containsString("join field [object.join_field] cannot be added inside an object or in a multi-field"));
}
public void testInvalidJoinFieldInsideMultiFields() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("number")
.field("type", "integer")
.startObject("fields")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.endObject()
.endObject()
.endObject()
.endObject().endObject().string();
IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(MapperParsingException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE, false));
assertThat(exc.getRootCause().getMessage(),
containsString("join field [number.join_field] cannot be added inside an object or in a multi-field"));
}
public void testMultipleJoinFields() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("join_field")
.field("type", "join")
.field("parent", "child")
.field("child", "grand_child")
.endObject()
.startObject("another_join_field")
.field("type", "join")
.field("product", "item")
.endObject()
.endObject()
.endObject().string();
DocumentMapper docMapper = createIndex("test").mapperService()
.documentMapperParser().parse("type", new CompressedXContent(mapping));
// Doc without join
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON));
assertNull(doc.rootDoc().getBinaryValue("join_field"));
// Doc parent
MapperParsingException exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder()
.startObject()
.field("join_field", "parent")
.startObject("another_join_field")
.field("name", "item")
.field("parent", "0")
.endObject()
.endObject().bytes(), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("cannot have two join fields in the same document"));
}
}

@@ -0,0 +1,65 @@
setup:
- do:
indices.create:
index: test
body:
mappings:
doc:
properties:
join_field: { "type": "join", "parent": "child", "child": "grand_child" }
- do:
index:
index: test
type: doc
id: 1
body: { "join_field": { "name": "parent" } }
- do:
index:
index: test
type: doc
id: 2
routing: 1
body: { "join_field": { "name": "child", "parent": "1" } }
- do:
index:
index: test
type: doc
id: 3
routing: 1
body: { "join_field": { "name": "grand_child", "parent": "2" } }
- do:
indices.refresh: {}
---
"Test basic":
- skip:
version: " - 5.99.99"
reason: parent-join was added in 6.0
- do:
search:
body: { sort: ["join_field"] }
- match: { hits.total: 3 }
- match: { hits.hits.0._index: "test" }
- match: { hits.hits.0._type: "doc" }
- match: { hits.hits.0._id: "2" }
- match: { hits.hits.0.fields.join_field: ["child"] }
- match: { hits.hits.0.fields.join_field#parent: ["1"] }
- is_false: hits.hits.0.fields.join_field#child
- match: { hits.hits.1._index: "test" }
- match: { hits.hits.1._type: "doc" }
- match: { hits.hits.1._id: "3" }
- match: { hits.hits.1.fields.join_field: ["grand_child"] }
- match: { hits.hits.1.fields.join_field#child: ["2"] }
- match: { hits.hits.2._index: "test" }
- match: { hits.hits.2._type: "doc" }
- match: { hits.hits.2._id: "1" }
- match: { hits.hits.2.fields.join_field: ["parent"] }
- is_false: hits.hits.2.fields.join_field#parent