Cleanup ParentFieldMapper:

* Remove remaining 1.x bwc logic.
* Stop storing stored fields and indexed terms for `_parent`. The _parent field's only purpose is to support joins between parent and child types, and storing only doc values is sufficient for that.
* In the mapping the parent field mapper is now known under the '{parent}#{child}' key, because this is the field the parent/child join uses too.
* Added a new sub fetch phase to look up the _parent field from the doc values field when required (before this it was fetched from the stored _parent field).
* Removed the ability to query directly on `_parent` in the query dsl. Instead the `{parent}#{child}` field should be used. Under the hood a doc values query is used instead of a term query, because only doc values fields are stored now.
* Added a new `parent_id` query to easily query child documents with a specific parent id without having to know which join field to use.
* In aggregations the `_parent` field can't be used any more either; the `{parent}#{child}` field name should be used instead to aggregate directly on the _parent join field.
parent 303b430480
commit b9dc5acf2c
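For orientation, a minimal usage sketch of what this change means for callers of the Java API. The index, type, and id names (`blogs`, `blog`, `blog_tag`, `"1"`) are illustrative assumptions, not taken from this commit; the calls themselves mirror the integration tests further down in this diff (ChildQuerySearchIT and ParentIdAggIT):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.search.aggregations.AggregationBuilders;

import static org.elasticsearch.index.query.QueryBuilders.parentId;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

static void findChildrenOfParent(Client client) {
    // Preferred: the new parent_id query hides the join field name.
    SearchResponse children = client.prepareSearch("blogs")
            .setTypes("blog_tag")
            .setQuery(parentId("blog_tag", "1"))
            .get();

    // Alternatively, query or aggregate on the join field directly.
    // For a child type whose _parent type is "blog", the field is "_parent#blog".
    SearchResponse byJoinField = client.prepareSearch("blogs")
            .setTypes("blog_tag")
            .setQuery(termQuery("_parent#blog", "1"))
            .addAggregation(AggregationBuilders.terms("parents").field("_parent#blog"))
            .get();
}
--------------------------------------------------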
@@ -19,7 +19,9 @@
package org.elasticsearch.index.get;

import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;

@@ -50,7 +52,10 @@ import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;

@@ -350,6 +355,14 @@ public final class ShardGetService extends AbstractIndexShardComponent {
            }
        }

        if (docMapper.parentFieldMapper().active()) {
            String parentId = ParentFieldSubFetchPhase.getParentId(docMapper.parentFieldMapper(), docIdAndVersion.context.reader(), docIdAndVersion.docId);
            if (fields == null) {
                fields = new HashMap<>(1);
            }
            fields.put(ParentFieldMapper.NAME, new GetField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
        }

        // now, go and do the script thingy if needed

        if (gFields != null && gFields.length > 0) {

@@ -70,6 +70,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
        this.fieldType = fieldType.clone();
        this.defaultFieldType = defaultFieldType.clone();
        this.defaultOptions = fieldType.indexOptions(); // we have to store it the fieldType is mutable
        this.docValuesSet = fieldType.hasDocValues();
        multiFieldsBuilder = new MultiFields.Builder();
    }

@ -22,25 +22,28 @@ import org.apache.lucene.document.Field;
|
|||
import org.apache.lucene.document.SortedDocValuesField;
|
||||
import org.apache.lucene.index.DocValuesType;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.queries.TermsQuery;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.DocValuesTermsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.lucene.Lucene;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.loader.SettingsLoader;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.fielddata.FieldDataType;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.ContentPath;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.Mapper;
|
||||
import org.elasticsearch.index.mapper.MapperBuilders;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MetadataFieldMapper;
|
||||
import org.elasticsearch.index.mapper.ParseContext;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
import org.elasticsearch.index.mapper.core.StringFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -65,22 +68,13 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
public static class Defaults {
|
||||
public static final String NAME = ParentFieldMapper.NAME;
|
||||
|
||||
public static final MappedFieldType FIELD_TYPE = new ParentFieldType();
|
||||
public static final MappedFieldType JOIN_FIELD_TYPE = new ParentFieldType();
|
||||
public static final ParentFieldType FIELD_TYPE = new ParentFieldType();
|
||||
|
||||
static {
|
||||
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
|
||||
FIELD_TYPE.setTokenized(false);
|
||||
FIELD_TYPE.setStored(true);
|
||||
FIELD_TYPE.setOmitNorms(true);
|
||||
FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
|
||||
FIELD_TYPE.setName(NAME);
|
||||
FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
|
||||
FIELD_TYPE.setHasDocValues(true);
|
||||
FIELD_TYPE.setDocValuesType(DocValuesType.SORTED);
|
||||
FIELD_TYPE.freeze();
|
||||
|
||||
JOIN_FIELD_TYPE.setHasDocValues(true);
|
||||
JOIN_FIELD_TYPE.setDocValuesType(DocValuesType.SORTED);
|
||||
JOIN_FIELD_TYPE.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -88,17 +82,10 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
private String parentType;
|
||||
|
||||
protected String indexName;
|
||||
|
||||
private final String documentType;
|
||||
|
||||
private final MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
|
||||
|
||||
private final MappedFieldType childJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
|
||||
|
||||
public Builder(String documentType) {
|
||||
super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
|
||||
this.indexName = name;
|
||||
super(Defaults.NAME, new ParentFieldType(Defaults.FIELD_TYPE, documentType), Defaults.FIELD_TYPE);
|
||||
this.documentType = documentType;
|
||||
builder = this;
|
||||
}
|
||||
|
@ -108,22 +95,14 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Builder fieldDataSettings(Settings fieldDataSettings) {
|
||||
Settings settings = Settings.builder().put(childJoinFieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
|
||||
childJoinFieldType.setFieldDataType(new FieldDataType(childJoinFieldType.fieldDataType().getType(), settings));
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ParentFieldMapper build(BuilderContext context) {
|
||||
if (parentType == null) {
|
||||
throw new MapperParsingException("[_parent] field mapping must contain the [type] option");
|
||||
}
|
||||
parentJoinFieldType.setName(joinField(documentType));
|
||||
parentJoinFieldType.setFieldDataType(null);
|
||||
childJoinFieldType.setName(joinField(parentType));
|
||||
return new ParentFieldMapper(fieldType, parentJoinFieldType, childJoinFieldType, parentType, context.indexSettings());
|
||||
name = joinField(parentType);
|
||||
setupFieldType(context);
|
||||
return new ParentFieldMapper(createParentJoinFieldMapper(documentType, context), fieldType, parentType, context.indexSettings());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -152,19 +131,40 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
@Override
|
||||
public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String parentType) {
|
||||
return new ParentFieldMapper(indexSettings, fieldType, parentType);
|
||||
public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) {
|
||||
StringFieldMapper parentJoinField = createParentJoinFieldMapper(typeName, new BuilderContext(indexSettings, new ContentPath(0)));
|
||||
MappedFieldType childJoinFieldType = Defaults.FIELD_TYPE.clone();
|
||||
childJoinFieldType.setName(joinField(null));
|
||||
return new ParentFieldMapper(parentJoinField, childJoinFieldType, null, indexSettings);
|
||||
}
|
||||
}
|
||||
|
||||
static StringFieldMapper createParentJoinFieldMapper(String docType, BuilderContext context) {
|
||||
StringFieldMapper.Builder parentJoinField = MapperBuilders.stringField(joinField(docType));
|
||||
parentJoinField.indexOptions(IndexOptions.NONE);
|
||||
parentJoinField.docValues(true);
|
||||
parentJoinField.fieldType().setDocValuesType(DocValuesType.SORTED);
|
||||
parentJoinField.fieldType().setFieldDataType(null);
|
||||
return parentJoinField.build(context);
|
||||
}
|
||||
|
||||
static final class ParentFieldType extends MappedFieldType {
|
||||
|
||||
final String documentType;
|
||||
|
||||
public ParentFieldType() {
|
||||
setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
|
||||
setFieldDataType(new FieldDataType(NAME, settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
|
||||
documentType = null;
|
||||
}
|
||||
|
||||
protected ParentFieldType(ParentFieldType ref) {
|
||||
ParentFieldType(ParentFieldType ref, String documentType) {
|
||||
super(ref);
|
||||
this.documentType = documentType;
|
||||
}
|
||||
|
||||
private ParentFieldType(ParentFieldType ref) {
|
||||
super(ref);
|
||||
this.documentType = ref.documentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -177,30 +177,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Uid value(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
return Uid.createUid(value.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object valueForSearch(Object value) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
String sValue = value.toString();
|
||||
if (sValue == null) {
|
||||
return null;
|
||||
}
|
||||
int index = sValue.indexOf(Uid.DELIMITER);
|
||||
if (index == -1) {
|
||||
return sValue;
|
||||
}
|
||||
return sValue.substring(index + 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* We don't need to analyzer the text, and we need to convert it to UID...
|
||||
*/
|
||||
|
@ -216,67 +192,30 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
|
||||
@Override
|
||||
public Query termsQuery(List values, @Nullable QueryShardContext context) {
|
||||
if (context == null) {
|
||||
return super.termsQuery(values, context);
|
||||
BytesRef[] ids = new BytesRef[values.size()];
|
||||
for (int i = 0; i < ids.length; i++) {
|
||||
ids[i] = indexedValueForSearch(values.get(i));
|
||||
}
|
||||
|
||||
List<String> types = new ArrayList<>(context.getMapperService().types().size());
|
||||
for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
|
||||
if (!documentMapper.parentFieldMapper().active()) {
|
||||
types.add(documentMapper.type());
|
||||
}
|
||||
}
|
||||
|
||||
List<BytesRef> bValues = new ArrayList<>(values.size());
|
||||
for (Object value : values) {
|
||||
BytesRef bValue = BytesRefs.toBytesRef(value);
|
||||
if (Uid.hasDelimiter(bValue)) {
|
||||
bValues.add(bValue);
|
||||
} else {
|
||||
// we use all non child types, cause we don't know if its exact or not...
|
||||
for (String type : types) {
|
||||
bValues.add(Uid.createUidAsBytes(type, bValue));
|
||||
}
|
||||
}
|
||||
}
|
||||
return new TermsQuery(name(), bValues);
|
||||
BooleanQuery.Builder query = new BooleanQuery.Builder();
|
||||
query.add(new DocValuesTermsQuery(name(), ids), BooleanClause.Occur.MUST);
|
||||
query.add(new TermQuery(new Term(TypeFieldMapper.NAME, documentType)), BooleanClause.Occur.FILTER);
|
||||
return query.build();
|
||||
}
|
||||
}
|
||||
|
||||
private final String parentType;
|
||||
// determines the field data settings
|
||||
private MappedFieldType childJoinFieldType;
|
||||
// has no impact of field data settings, is just here for creating a join field, the parent field mapper in the child type pointing to this type determines the field data settings for this join field
|
||||
private final MappedFieldType parentJoinFieldType;
|
||||
// has no impact of field data settings, is just here for creating a join field,
|
||||
// the parent field mapper in the child type pointing to this type determines the field data settings for this join field
|
||||
private final StringFieldMapper parentJoinField;
|
||||
|
||||
private ParentFieldMapper(MappedFieldType fieldType, MappedFieldType parentJoinFieldType, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
|
||||
super(NAME, fieldType, Defaults.FIELD_TYPE, indexSettings);
|
||||
private ParentFieldMapper(StringFieldMapper parentJoinField, MappedFieldType childJoinFieldType, String parentType, Settings indexSettings) {
|
||||
super(NAME, childJoinFieldType, Defaults.FIELD_TYPE, indexSettings);
|
||||
this.parentType = parentType;
|
||||
this.parentJoinFieldType = parentJoinFieldType;
|
||||
this.parentJoinFieldType.freeze();
|
||||
this.childJoinFieldType = childJoinFieldType;
|
||||
if (childJoinFieldType != null) {
|
||||
this.childJoinFieldType.freeze();
|
||||
}
|
||||
}
|
||||
|
||||
private ParentFieldMapper(Settings indexSettings, MappedFieldType existing, String parentType) {
|
||||
this(existing == null ? Defaults.FIELD_TYPE.clone() : existing.clone(), joinFieldTypeForParentType(parentType, indexSettings), null, null, indexSettings);
|
||||
}
|
||||
|
||||
private static MappedFieldType joinFieldTypeForParentType(String parentType, Settings indexSettings) {
|
||||
MappedFieldType parentJoinFieldType = Defaults.JOIN_FIELD_TYPE.clone();
|
||||
parentJoinFieldType.setName(joinField(parentType));
|
||||
parentJoinFieldType.freeze();
|
||||
return parentJoinFieldType;
|
||||
this.parentJoinField = parentJoinField;
|
||||
}
|
||||
|
||||
public MappedFieldType getParentJoinFieldType() {
|
||||
return parentJoinFieldType;
|
||||
}
|
||||
|
||||
public MappedFieldType getChildJoinFieldType() {
|
||||
return childJoinFieldType;
|
||||
return parentJoinField.fieldType();
|
||||
}
|
||||
|
||||
public String type() {
|
||||
|
@ -298,7 +237,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
|
||||
boolean parent = context.docMapper().isParent(context.type());
|
||||
if (parent) {
|
||||
addJoinFieldIfNeeded(fields, parentJoinFieldType, context.id());
|
||||
fields.add(new SortedDocValuesField(parentJoinField.fieldType().name(), new BytesRef(context.id())));
|
||||
}
|
||||
|
||||
if (!active()) {
|
||||
|
@ -309,8 +248,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
// we are in the parsing of _parent phase
|
||||
String parentId = context.parser().text();
|
||||
context.sourceToParse().parent(parentId);
|
||||
fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
|
||||
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
|
||||
fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(parentId)));
|
||||
} else {
|
||||
// otherwise, we are running it post processing of the xcontent
|
||||
String parsedParentId = context.doc().get(Defaults.NAME);
|
||||
|
@ -321,8 +259,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
|
||||
}
|
||||
// we did not add it in the parsing phase, add it now
|
||||
fields.add(new Field(fieldType().name(), Uid.createUid(context.stringBuilder(), parentType, parentId), fieldType()));
|
||||
addJoinFieldIfNeeded(fields, childJoinFieldType, parentId);
|
||||
fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(parentId)));
|
||||
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), parentType, parentId))) {
|
||||
throw new MapperParsingException("Parent id mismatch, document value is [" + Uid.createUid(parsedParentId).id() + "], while external value is [" + parentId + "]");
|
||||
}
|
||||
|
@ -331,12 +268,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
// we have parent mapping, yet no value was set, ignore it...
|
||||
}
|
||||
|
||||
private void addJoinFieldIfNeeded(List<Field> fields, MappedFieldType fieldType, String id) {
|
||||
if (fieldType.hasDocValues()) {
|
||||
fields.add(new SortedDocValuesField(fieldType.name(), new BytesRef(id)));
|
||||
}
|
||||
}
|
||||
|
||||
public static String joinField(String parentType) {
|
||||
return ParentFieldMapper.NAME + "#" + parentType;
|
||||
}
|
||||
|
@ -346,8 +277,9 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
return CONTENT_TYPE;
|
||||
}
|
||||
|
||||
private boolean joinFieldHasCustomFieldDataSettings() {
|
||||
return childJoinFieldType != null && childJoinFieldType.fieldDataType() != null && childJoinFieldType.fieldDataType().equals(Defaults.JOIN_FIELD_TYPE.fieldDataType()) == false;
|
||||
@Override
|
||||
public Iterator<Mapper> iterator() {
|
||||
return Collections.<Mapper>singleton(parentJoinField).iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -360,12 +292,16 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
builder.startObject(CONTENT_TYPE);
|
||||
builder.field("type", parentType);
|
||||
if (includeDefaults || joinFieldHasCustomFieldDataSettings()) {
|
||||
builder.field("fielddata", (Map) childJoinFieldType.fieldDataType().getSettings().getAsMap());
|
||||
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
private boolean joinFieldHasCustomFieldDataSettings() {
|
||||
return fieldType != null && fieldType.fieldDataType() != null && fieldType.fieldDataType().equals(Defaults.FIELD_TYPE.fieldDataType()) == false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
|
||||
super.doMerge(mergeWith, updateAllTypes);
|
||||
|
@ -375,18 +311,13 @@ public class ParentFieldMapper extends MetadataFieldMapper {
|
|||
}
|
||||
|
||||
List<String> conflicts = new ArrayList<>();
|
||||
fieldType().checkCompatibility(fieldMergeWith.fieldType(), conflicts, true); // always strict, this cannot change
|
||||
parentJoinFieldType.checkCompatibility(fieldMergeWith.parentJoinFieldType, conflicts, true); // same here
|
||||
if (childJoinFieldType != null) {
|
||||
// TODO: this can be set to false when the old parent/child impl is removed, we can do eager global ordinals loading per type.
|
||||
childJoinFieldType.checkCompatibility(fieldMergeWith.childJoinFieldType, conflicts, updateAllTypes == false);
|
||||
}
|
||||
fieldType().checkCompatibility(fieldMergeWith.fieldType, conflicts, true);
|
||||
if (conflicts.isEmpty() == false) {
|
||||
throw new IllegalArgumentException("Merge conflicts: " + conflicts);
|
||||
}
|
||||
|
||||
if (active()) {
|
||||
childJoinFieldType = fieldMergeWith.childJoinFieldType.clone();
|
||||
fieldType = fieldMergeWith.fieldType.clone();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,104 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.DocValuesTermsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
public final class ParentIdQueryBuilder extends AbstractQueryBuilder<ParentIdQueryBuilder> {
|
||||
|
||||
public static final String NAME = "parent_id";
|
||||
static final ParentIdQueryBuilder PROTO = new ParentIdQueryBuilder(null, null);
|
||||
|
||||
private final String type;
|
||||
private final String id;
|
||||
|
||||
public ParentIdQueryBuilder(String type, String id) {
|
||||
this.type = type;
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(NAME);
|
||||
builder.field(ParentIdQueryParser.TYPE_FIELD.getPreferredName(), type);
|
||||
builder.field(ParentIdQueryParser.ID_FIELD.getPreferredName(), id);
|
||||
printBoostAndQueryName(builder);
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
|
||||
if (childDocMapper == null) {
|
||||
throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
|
||||
}
|
||||
ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
|
||||
if (parentFieldMapper.active() == false) {
|
||||
throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
|
||||
}
|
||||
String fieldName = ParentFieldMapper.joinField(parentFieldMapper.type());
|
||||
return new DocValuesTermsQuery(fieldName, id);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ParentIdQueryBuilder doReadFrom(StreamInput in) throws IOException {
|
||||
String type = in.readString();
|
||||
String id = in.readString();
|
||||
return new ParentIdQueryBuilder(type, id);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doWriteTo(StreamOutput out) throws IOException {
|
||||
out.writeString(type);
|
||||
out.writeString(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doEquals(ParentIdQueryBuilder that) {
|
||||
return Objects.equals(type, that.type) && Objects.equals(id, that.id);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected int doHashCode() {
|
||||
return Objects.hash(type, id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,76 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public final class ParentIdQueryParser implements QueryParser<ParentIdQueryBuilder> {
|
||||
|
||||
public static final ParseField ID_FIELD = new ParseField("id");
|
||||
public static final ParseField TYPE_FIELD = new ParseField("type", "child_type");
|
||||
|
||||
@Override
|
||||
public String[] names() {
|
||||
return new String[]{ParentIdQueryBuilder.NAME};
|
||||
}
|
||||
|
||||
@Override
|
||||
public ParentIdQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String type = null;
|
||||
String id = null;
|
||||
String queryName = null;
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
|
||||
type = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ID_FIELD)) {
|
||||
id = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
ParentIdQueryBuilder queryBuilder = new ParentIdQueryBuilder(type, id);
|
||||
queryBuilder.queryName(queryName);
|
||||
queryBuilder.boost(boost);
|
||||
return queryBuilder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ParentIdQueryBuilder getBuilderPrototype() {
|
||||
return ParentIdQueryBuilder.PROTO;
|
||||
}
|
||||
}
|
|
@@ -490,6 +490,14 @@ public abstract class QueryBuilders {
        return new HasParentQueryBuilder(type, query);
    }

    /**
     * Constructs a new parent id query that returns all child documents of the specified type that
     * point to the specified id.
     */
    public static ParentIdQueryBuilder parentId(String type, String id) {
        return new ParentIdQueryBuilder(type, id);
    }

    public static NestedQueryBuilder nestedQuery(String path, QueryBuilder query) {
        return new NestedQueryBuilder(path, query);
    }

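A short usage sketch for the new factory method, modeled on the `testParentIdQuery` integration test added later in this commit (the `client()` helper and the `test`/`child`/`p1`/`p2` names come from that test):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchResponse;

import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.parentId;

// children that point to parent "p1"
SearchResponse response = client().prepareSearch("test")
        .setQuery(parentId("child", "p1"))
        .get();

// children that point to either "p1" or "p2"
response = client().prepareSearch("test")
        .setQuery(boolQuery()
                .should(parentId("child", "p1"))
                .should(parentId("child", "p2")))
        .get();
--------------------------------------------------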
@ -68,6 +68,7 @@ import org.elasticsearch.index.query.MatchQueryParser;
|
|||
import org.elasticsearch.index.query.MoreLikeThisQueryParser;
|
||||
import org.elasticsearch.index.query.MultiMatchQueryParser;
|
||||
import org.elasticsearch.index.query.NestedQueryParser;
|
||||
import org.elasticsearch.index.query.ParentIdQueryParser;
|
||||
import org.elasticsearch.index.query.PrefixQueryParser;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParser;
|
||||
|
@ -218,6 +219,7 @@ import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
|
|||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.matchedqueries.MatchedQueriesFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.parent.ParentFieldSubFetchPhase;
|
||||
import org.elasticsearch.search.fetch.script.ScriptFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceSubPhase;
|
||||
import org.elasticsearch.search.fetch.version.VersionFetchSubPhase;
|
||||
|
@ -336,6 +338,7 @@ public class SearchModule extends AbstractModule {
|
|||
fetchSubPhaseMultibinder.addBinding().to(VersionFetchSubPhase.class);
|
||||
fetchSubPhaseMultibinder.addBinding().to(MatchedQueriesFetchSubPhase.class);
|
||||
fetchSubPhaseMultibinder.addBinding().to(HighlightPhase.class);
|
||||
fetchSubPhaseMultibinder.addBinding().to(ParentFieldSubFetchPhase.class);
|
||||
for (Class<? extends FetchSubPhase> clazz : fetchSubPhases) {
|
||||
fetchSubPhaseMultibinder.addBinding().to(clazz);
|
||||
}
|
||||
|
@ -523,6 +526,7 @@ public class SearchModule extends AbstractModule {
|
|||
registerQueryParser(GeoPolygonQueryParser::new);
|
||||
registerQueryParser(ExistsQueryParser::new);
|
||||
registerQueryParser(MatchNoneQueryParser::new);
|
||||
registerQueryParser(ParentIdQueryParser::new);
|
||||
if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
|
||||
registerQueryParser(GeoShapeQueryParser::new);
|
||||
}
|
||||
|
|
|
@ -1033,22 +1033,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
final Map<String, MappedFieldType> warmUp = new HashMap<>();
|
||||
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
|
||||
for (FieldMapper fieldMapper : docMapper.mappers()) {
|
||||
final FieldDataType fieldDataType;
|
||||
final String indexName;
|
||||
if (fieldMapper instanceof ParentFieldMapper) {
|
||||
MappedFieldType joinFieldType = ((ParentFieldMapper) fieldMapper).getChildJoinFieldType();
|
||||
if (joinFieldType == null) {
|
||||
continue;
|
||||
}
|
||||
fieldDataType = joinFieldType.fieldDataType();
|
||||
// TODO: this can be removed in 3.0 when the old parent/child impl is removed:
|
||||
// related to: https://github.com/elastic/elasticsearch/pull/12418
|
||||
indexName = fieldMapper.fieldType().name();
|
||||
} else {
|
||||
fieldDataType = fieldMapper.fieldType().fieldDataType();
|
||||
indexName = fieldMapper.fieldType().name();
|
||||
}
|
||||
|
||||
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
|
||||
final String indexName = fieldMapper.fieldType().name();
|
||||
if (fieldDataType == null) {
|
||||
continue;
|
||||
}
|
||||
|
@ -1101,21 +1087,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
|||
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
|
||||
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
|
||||
for (FieldMapper fieldMapper : docMapper.mappers()) {
|
||||
final FieldDataType fieldDataType;
|
||||
final String indexName;
|
||||
if (fieldMapper instanceof ParentFieldMapper) {
|
||||
MappedFieldType joinFieldType = ((ParentFieldMapper) fieldMapper).getChildJoinFieldType();
|
||||
if (joinFieldType == null) {
|
||||
continue;
|
||||
}
|
||||
fieldDataType = joinFieldType.fieldDataType();
|
||||
// TODO: this can be removed in 3.0 when the old parent/child impl is removed:
|
||||
// related to: https://github.com/elastic/elasticsearch/pull/12418
|
||||
indexName = fieldMapper.fieldType().name();
|
||||
} else {
|
||||
fieldDataType = fieldMapper.fieldType().fieldDataType();
|
||||
indexName = fieldMapper.fieldType().name();
|
||||
}
|
||||
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
|
||||
final String indexName = fieldMapper.fieldType().name();
|
||||
if (fieldDataType == null) {
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -27,6 +27,7 @@ import org.apache.lucene.search.BooleanQuery;
|
|||
import org.apache.lucene.search.ConstantScoreScorer;
|
||||
import org.apache.lucene.search.ConstantScoreWeight;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.search.DocValuesTermsQuery;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Scorer;
|
||||
|
@ -284,20 +285,18 @@ public final class InnerHitsContext {
|
|||
|
||||
@Override
|
||||
public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
|
||||
final String field;
|
||||
final String term;
|
||||
final Query hitQuery;
|
||||
if (isParentHit(hitContext.hit())) {
|
||||
field = ParentFieldMapper.NAME;
|
||||
term = Uid.createUid(hitContext.hit().type(), hitContext.hit().id());
|
||||
String field = ParentFieldMapper.joinField(hitContext.hit().type());
|
||||
hitQuery = new DocValuesTermsQuery(field, hitContext.hit().id());
|
||||
} else if (isChildHit(hitContext.hit())) {
|
||||
DocumentMapper hitDocumentMapper = mapperService.documentMapper(hitContext.hit().type());
|
||||
final String parentType = hitDocumentMapper.parentFieldMapper().type();
|
||||
field = UidFieldMapper.NAME;
|
||||
SearchHitField parentField = hitContext.hit().field(ParentFieldMapper.NAME);
|
||||
if (parentField == null) {
|
||||
throw new IllegalStateException("All children must have a _parent");
|
||||
}
|
||||
term = Uid.createUid(parentType, (String) parentField.getValue());
|
||||
hitQuery = new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUid(parentType, parentField.getValue())));
|
||||
} else {
|
||||
return Lucene.EMPTY_TOP_DOCS;
|
||||
}
|
||||
|
@ -305,9 +304,9 @@ public final class InnerHitsContext {
|
|||
BooleanQuery q = new BooleanQuery.Builder()
|
||||
.add(query.query(), Occur.MUST)
|
||||
// Only include docs that have the current hit as parent
|
||||
.add(new TermQuery(new Term(field, term)), Occur.MUST)
|
||||
.add(hitQuery, Occur.FILTER)
|
||||
// Only include docs that have this inner hits type
|
||||
.add(documentMapper.typeFilter(), Occur.MUST)
|
||||
.add(documentMapper.typeFilter(), Occur.FILTER)
|
||||
.build();
|
||||
if (size() == 0) {
|
||||
final int count = context.searcher().count(q);
|
||||
|
|
|
@ -0,0 +1,88 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.fetch.parent;
|
||||
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.index.SortedDocValues;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
import org.elasticsearch.search.SearchHitField;
|
||||
import org.elasticsearch.search.SearchParseElement;
|
||||
import org.elasticsearch.search.fetch.FetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.internal.InternalSearchHit;
|
||||
import org.elasticsearch.search.internal.InternalSearchHitField;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class ParentFieldSubFetchPhase implements FetchSubPhase {
|
||||
|
||||
@Override
|
||||
public Map<String, ? extends SearchParseElement> parseElements() {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hitExecutionNeeded(SearchContext context) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void hitExecute(SearchContext context, HitContext hitContext) {
|
||||
ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).parentFieldMapper();
|
||||
if (parentFieldMapper.active() == false) {
|
||||
return;
|
||||
}
|
||||
|
||||
String parentId = getParentId(parentFieldMapper, hitContext.reader(), hitContext.docId());
|
||||
Map<String, SearchHitField> fields = hitContext.hit().fieldsOrNull();
|
||||
if (fields == null) {
|
||||
fields = new HashMap<>();
|
||||
hitContext.hit().fields(fields);
|
||||
}
|
||||
fields.put(ParentFieldMapper.NAME, new InternalSearchHitField(ParentFieldMapper.NAME, Collections.singletonList(parentId)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hitsExecutionNeeded(SearchContext context) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) {
|
||||
}
|
||||
|
||||
public static String getParentId(ParentFieldMapper fieldMapper, LeafReader reader, int docId) {
|
||||
try {
|
||||
SortedDocValues docValues = reader.getSortedDocValues(fieldMapper.name());
|
||||
BytesRef parentId = docValues.get(docId);
|
||||
assert parentId.length > 0;
|
||||
return parentId.utf8ToString();
|
||||
} catch (IOException e) {
|
||||
throw ExceptionsHelper.convertToElastic(e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -164,11 +164,11 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase {
|
|||
}
|
||||
|
||||
public void testSorting() throws Exception {
|
||||
IndexFieldData indexFieldData = getForField(childType);
|
||||
IndexFieldData indexFieldData = getForField(parentType);
|
||||
IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
|
||||
IndexFieldData.XFieldComparatorSource comparator = indexFieldData.comparatorSource("_last", MultiValueMode.MIN, null);
|
||||
|
||||
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.NAME, comparator, false)));
|
||||
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, false)));
|
||||
assertThat(topDocs.totalHits, equalTo(8));
|
||||
assertThat(topDocs.scoreDocs.length, equalTo(8));
|
||||
assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
|
||||
|
@ -188,7 +188,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase {
|
|||
assertThat(topDocs.scoreDocs[7].doc, equalTo(7));
|
||||
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[7]).fields[0]), equalTo(null));
|
||||
|
||||
topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.NAME, comparator, true)));
|
||||
topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField(ParentFieldMapper.joinField(parentType), comparator, true)));
|
||||
assertThat(topDocs.totalHits, equalTo(8));
|
||||
assertThat(topDocs.scoreDocs.length, equalTo(8));
|
||||
assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
|
||||
|
|
|
@ -47,10 +47,10 @@ public class ParentFieldMapperTests extends ESTestCase {
|
|||
assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
|
||||
assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.LAZY));
|
||||
assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
}
|
||||
|
||||
public void testPost2Dot0EagerLoading() {
|
||||
|
@ -65,10 +65,10 @@ public class ParentFieldMapperTests extends ESTestCase {
|
|||
assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
|
||||
assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER));
|
||||
assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
}
|
||||
|
||||
public void testPost2Dot0EagerGlobalOrdinalsLoading() {
|
||||
|
@ -83,10 +83,10 @@ public class ParentFieldMapperTests extends ESTestCase {
|
|||
assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().name(), equalTo("_parent#parent"));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.getChildJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent"));
|
||||
assertThat(parentFieldMapper.fieldType().fieldDataType().getLoading(), equalTo(Loading.EAGER_GLOBAL_ORDINALS));
|
||||
assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true));
|
||||
assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED));
|
||||
}
|
||||
|
||||
private static Settings post2Dot0IndexSettings() {
|
||||
|
|
|
@ -55,6 +55,6 @@ public class ParentMappingTests extends ESSingleNodeTestCase {
|
|||
.endObject()
|
||||
.bytes()).type("type").id("1").parent("1122"));
|
||||
|
||||
assertEquals(Uid.createUid("p_type", "1122"), doc.rootDoc().get("_parent"));
|
||||
assertEquals("1122", doc.rootDoc().getBinaryValue("_parent#p_type").utf8ToString());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,127 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.DocValuesTermsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldDataService;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.test.TestSearchContext;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQueryBuilder> {
|
||||
|
||||
protected static final String PARENT_TYPE = "parent";
|
||||
protected static final String CHILD_TYPE = "child";
|
||||
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
MapperService mapperService = queryShardContext().getMapperService();
|
||||
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
|
||||
STRING_FIELD_NAME, "type=string",
|
||||
INT_FIELD_NAME, "type=integer",
|
||||
DOUBLE_FIELD_NAME, "type=double",
|
||||
BOOLEAN_FIELD_NAME, "type=boolean",
|
||||
DATE_FIELD_NAME, "type=date",
|
||||
OBJECT_FIELD_NAME, "type=object"
|
||||
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
|
||||
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
|
||||
"_parent", "type=" + PARENT_TYPE,
|
||||
STRING_FIELD_NAME, "type=string",
|
||||
INT_FIELD_NAME, "type=integer",
|
||||
DOUBLE_FIELD_NAME, "type=double",
|
||||
BOOLEAN_FIELD_NAME, "type=boolean",
|
||||
DATE_FIELD_NAME, "type=date",
|
||||
OBJECT_FIELD_NAME, "type=object"
|
||||
).string()), MapperService.MergeReason.MAPPING_UPDATE, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setSearchContext(String[] types) {
|
||||
final MapperService mapperService = queryShardContext().getMapperService();
|
||||
final IndexFieldDataService fieldData = indexFieldDataService();
|
||||
TestSearchContext testSearchContext = new TestSearchContext() {
|
||||
private InnerHitsContext context;
|
||||
|
||||
|
||||
@Override
|
||||
public void innerHits(InnerHitsContext innerHitsContext) {
|
||||
context = innerHitsContext;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InnerHitsContext innerHits() {
|
||||
return context;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MapperService mapperService() {
|
||||
return mapperService; // need to build / parse inner hits sort fields
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexFieldDataService fieldData() {
|
||||
return fieldData; // need to build / parse inner hits sort fields
|
||||
}
|
||||
};
|
||||
testSearchContext.setTypes(types);
|
||||
SearchContext.setCurrent(testSearchContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ParentIdQueryBuilder doCreateTestQueryBuilder() {
|
||||
return new ParentIdQueryBuilder(CHILD_TYPE, randomAsciiOfLength(4));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
assertThat(query, Matchers.instanceOf(DocValuesTermsQuery.class));
|
||||
DocValuesTermsQuery termsQuery = (DocValuesTermsQuery) query;
|
||||
// there are no getters to get the field and terms on DocValuesTermsQuery, so lets validate by creating a
|
||||
// new query based on the builder:
|
||||
assertThat(termsQuery, Matchers.equalTo(new DocValuesTermsQuery("_parent#" + PARENT_TYPE, queryBuilder.getId())));
|
||||
}
|
||||
|
||||
public void testFromJson() throws IOException {
|
||||
String query =
|
||||
"{\n" +
|
||||
" \"parent_id\" : {\n" +
|
||||
" \"type\" : \"child\",\n" +
|
||||
" \"id\" : \"123\",\n" +
|
||||
" \"boost\" : 3.0,\n" +
|
||||
" \"_name\" : \"name\"" +
|
||||
" }\n" +
|
||||
"}";
|
||||
ParentIdQueryBuilder queryBuilder = (ParentIdQueryBuilder) parseQuery(query);
|
||||
checkGeneratedJson(query, queryBuilder);
|
||||
assertThat(queryBuilder.getType(), Matchers.equalTo("child"));
|
||||
assertThat(queryBuilder.getId(), Matchers.equalTo("123"));
|
||||
assertThat(queryBuilder.boost(), Matchers.equalTo(3f));
|
||||
assertThat(queryBuilder.queryName(), Matchers.equalTo("name"));
|
||||
}
|
||||
|
||||
}
|
|
@@ -47,11 +47,11 @@ public class ParentIdAggIT extends ESIntegTestCase {

        refresh();
        ensureGreen("testidx");
        SearchResponse searchResponse = client().prepareSearch("testidx").setTypes("childtype").setQuery(matchAllQuery()).addAggregation(AggregationBuilders.terms("children").field("_parent")).get();
        SearchResponse searchResponse = client().prepareSearch("testidx").setTypes("childtype").setQuery(matchAllQuery()).addAggregation(AggregationBuilders.terms("children").field("_parent#parenttype")).get();
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(2l));
        assertSearchResponse(searchResponse);
        assertThat(searchResponse.getAggregations().getAsMap().get("children"), instanceOf(Terms.class));
        Terms terms = (Terms) searchResponse.getAggregations().getAsMap().get("children");
        assertThat(terms.getBuckets().iterator().next().getDocCount(), equalTo(2l));
    }
}

@ -67,6 +67,7 @@ import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
|
|||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.parentId;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
|
||||
|
@ -208,7 +209,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
|||
assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1"));
|
||||
|
||||
// TEST matching on parent
|
||||
searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent", "p1")).fields("_parent").get();
|
||||
searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).fields("_parent").get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
|
||||
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2")));
|
||||
|
@ -216,7 +217,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
|||
assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2")));
|
||||
assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1"));
|
||||
|
||||
searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent:p1")).fields("_parent").get();
|
||||
searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).fields("_parent").get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
|
||||
assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2")));
|
||||
|
@ -953,70 +954,63 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
|||
assertThat(searchResponse.getHits().getAt(0).score(), equalTo(3.0f));
|
||||
}
|
||||
|
||||
public void testParentFieldFilter() throws Exception {
|
||||
public void testParentFieldQuery() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.setSettings(settingsBuilder().put(indexSettings())
|
||||
.put("index.refresh_interval", -1))
|
||||
.addMapping("parent")
|
||||
.addMapping("child", "_parent", "type=parent")
|
||||
.addMapping("child2", "_parent", "type=parent"));
|
||||
.addMapping("child", "_parent", "type=parent"));
|
||||
ensureGreen();
|
||||
|
||||
// test term filter
|
||||
SearchResponse response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1")))
|
||||
SearchResponse response = client().prepareSearch("test").setQuery(termQuery("_parent", "p1"))
|
||||
.get();
|
||||
assertHitCount(response, 0l);
|
||||
assertHitCount(response, 0L);
|
||||
|
||||
client().prepareIndex("test", "some_type", "1").setSource("field", "value").get();
|
||||
client().prepareIndex("test", "parent", "p1").setSource("p_field", "value").get();
|
||||
client().prepareIndex("test", "child", "c1").setSource("c_field", "value").setParent("p1").get();
|
||||
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 0l);
|
||||
client().prepareIndex("test", "child", "c1").setSource("{}").setParent("p1").get();
|
||||
refresh();
|
||||
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 1l);
|
||||
response = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).get();
|
||||
assertHitCount(response, 1L);
|
||||
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "parent#p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 1l);
|
||||
|
||||
client().prepareIndex("test", "parent2", "p1").setSource("p_field", "value").setRefresh(true).get();
|
||||
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 1l);
|
||||
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termQuery("_parent", "parent#p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 1l);
|
||||
|
||||
// test terms filter
|
||||
client().prepareIndex("test", "child2", "c1").setSource("c_field", "value").setParent("p1").get();
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 1l);
|
||||
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "parent#p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 1l);
|
||||
response = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).get();
|
||||
assertHitCount(response, 1L);
|
||||
|
||||
client().prepareIndex("test", "child", "c2").setSource("{}").setParent("p2").get();
|
||||
refresh();
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 2l);
|
||||
|
||||
refresh();
|
||||
response = client().prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "p1", "p1"))).execute()
|
||||
.actionGet();
|
||||
assertHitCount(response, 2l);
|
||||
response = client().prepareSearch("test").setQuery(termsQuery("_parent#parent", "p1", "p2")).get();
|
||||
assertHitCount(response, 2L);
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("_parent", "parent#p1", "parent2#p1"))).get();
|
||||
assertHitCount(response, 2l);
|
||||
.setQuery(boolQuery()
|
||||
.should(termQuery("_parent#parent", "p1"))
|
||||
.should(termQuery("_parent#parent", "p2"))
|
||||
).get();
|
||||
assertHitCount(response, 2L);
|
||||
}
|
||||
|
||||
public void testParentIdQuery() throws Exception {
|
||||
assertAcked(prepareCreate("test")
|
||||
.setSettings(settingsBuilder().put(indexSettings())
|
||||
.put("index.refresh_interval", -1))
|
||||
.addMapping("parent")
|
||||
.addMapping("child", "_parent", "type=parent"));
|
||||
ensureGreen();
|
||||
|
||||
client().prepareIndex("test", "child", "c1").setSource("{}").setParent("p1").get();
|
||||
refresh();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test").setQuery(parentId("child", "p1")).get();
|
||||
assertHitCount(response, 1L);
|
||||
|
||||
client().prepareIndex("test", "child", "c2").setSource("{}").setParent("p2").get();
|
||||
refresh();
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(boolQuery()
|
||||
.should(parentId("child", "p1"))
|
||||
.should(parentId("child", "p2"))
|
||||
).get();
|
||||
assertHitCount(response, 2L);
|
||||
}
|
||||
|
||||
public void testHasChildNotBeingCached() throws IOException {
|
||||
|
@ -1459,7 +1453,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase {
|
|||
refresh();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(multiMatchQuery("1", "_parent"))
|
||||
.setQuery(multiMatchQuery("1", "_parent#type1"))
|
||||
.get();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(1l));
|
||||
|
|
|
@ -157,7 +157,7 @@ public class ParentFieldLoadingIT extends ESIntegTestCase {
|
|||
MapperService mapperService = indexService.mapperService();
|
||||
DocumentMapper documentMapper = mapperService.documentMapper("child");
|
||||
if (documentMapper != null) {
|
||||
verified = documentMapper.parentFieldMapper().getChildJoinFieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS;
|
||||
verified = documentMapper.parentFieldMapper().fieldType().fieldDataType().getLoading() == MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS;
|
||||
}
|
||||
}
|
||||
assertTrue(verified);
|
||||
|
|
|
@@ -181,6 +181,12 @@ When `max_children` was set to `0` on the `has_child` query then there was no up
are allowed to match. This has changed and `0` now really means that zero child documents are allowed. If no upper limit
is needed then the `max_children` option shouldn't be defined at all on the `has_child` query.

==== `_parent` field no longer indexed

The join between parent and child documents no longer relies on indexed fields, and therefore from `3.0.0` onwards
the `_parent` field is no longer indexed. In order to find documents that refer to a specific parent id,
the new `parent_id` query can be used. The get response and the hits inside the search response still include
the parent id under the `_parent` key.
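A small migration sketch for Java API users (the `child` type, `parent` parent type, and `p1` id are illustrative); the JSON DSL equivalent is the new `parent_id` query documented in the query DSL reference:

[source,java]
--------------------------------------------------
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

// before 3.0.0: a term query on the indexed _parent field
QueryBuilder before = QueryBuilders.termQuery("_parent", "p1");

// from 3.0.0 on: use the parent_id query ...
QueryBuilder after = QueryBuilders.parentId("child", "p1");

// ... or address the doc values join field directly ("_parent#" + parent type)
QueryBuilder joinField = QueryBuilders.termQuery("_parent#parent", "p1");
--------------------------------------------------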
|
[[breaking_30_settings_changes]]
=== Settings changes

@@ -29,4 +29,6 @@ include::has-child-query.asciidoc[]

include::has-parent-query.asciidoc[]

include::parent-id-query.asciidoc[]

|
@@ -0,0 +1,31 @@
[[query-dsl-parent-id-query]]
=== Parent Id Query

added[3.0.0]

The `parent_id` query can be used to find child documents which point to a particular parent id.

The actual underlying Lucene field that is used to store which parent id a child document refers to
is determined by the child type's `_parent` field. This query helps by selecting the right field based
on the specified child type. Example:

[source,js]
--------------------------------------------------
{
    "parent_id" : {
        "type" : "blog_tag",
        "id" : "1"
    }
}
--------------------------------------------------

==== Parameters

This query has two required parameters:

[horizontal]
`type`::
The child type. This must be a type with a `_parent` field.

`id`::
The parent id that selected child documents must refer to.
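Under the hood the query is rewritten to a Lucene doc values query on the join field, as implemented by `ParentIdQueryBuilder#doToQuery` in this change; a simplified sketch:

[source,java]
--------------------------------------------------
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;

static Query parentIdToLucene(QueryShardContext context, String childType, String parentDocId) {
    DocumentMapper childDocMapper = context.getMapperService().documentMapper(childType);
    ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
    // the join field is "_parent#" + the configured parent type, e.g. "_parent#blog" for a "blog_tag" child
    String joinField = ParentFieldMapper.joinField(parentFieldMapper.type());
    return new DocValuesTermsQuery(joinField, parentDocId);
}
--------------------------------------------------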