Merge branch 'mattweber-multiple_collapse_inner_hits'

This commit is contained in:
Jim Ferenczi 2017-05-26 13:28:08 +02:00
commit 9ef414fead
8 changed files with 297 additions and 65 deletions

View File

@ -32,6 +32,7 @@ import org.elasticsearch.search.collapse.CollapseBuilder;
import java.io.IOException; import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import java.util.function.Function; import java.util.function.Function;
/** /**
@ -59,7 +60,7 @@ final class ExpandSearchPhase extends SearchPhase {
final SearchRequest searchRequest = context.getRequest(); final SearchRequest searchRequest = context.getRequest();
return searchRequest.source() != null && return searchRequest.source() != null &&
searchRequest.source().collapse() != null && searchRequest.source().collapse() != null &&
searchRequest.source().collapse().getInnerHit() != null; searchRequest.source().collapse().getInnerHits().isEmpty() == false;
} }
@Override @Override
@ -67,6 +68,7 @@ final class ExpandSearchPhase extends SearchPhase {
if (isCollapseRequest() && searchResponse.getHits().getHits().length > 0) { if (isCollapseRequest() && searchResponse.getHits().getHits().length > 0) {
SearchRequest searchRequest = context.getRequest(); SearchRequest searchRequest = context.getRequest();
CollapseBuilder collapseBuilder = searchRequest.source().collapse(); CollapseBuilder collapseBuilder = searchRequest.source().collapse();
final List<InnerHitBuilder> innerHitBuilders = collapseBuilder.getInnerHits();
MultiSearchRequest multiRequest = new MultiSearchRequest(); MultiSearchRequest multiRequest = new MultiSearchRequest();
if (collapseBuilder.getMaxConcurrentGroupRequests() > 0) { if (collapseBuilder.getMaxConcurrentGroupRequests() > 0) {
multiRequest.maxConcurrentSearchRequests(collapseBuilder.getMaxConcurrentGroupRequests()); multiRequest.maxConcurrentSearchRequests(collapseBuilder.getMaxConcurrentGroupRequests());
@ -83,27 +85,31 @@ final class ExpandSearchPhase extends SearchPhase {
if (origQuery != null) { if (origQuery != null) {
groupQuery.must(origQuery); groupQuery.must(origQuery);
} }
SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(collapseBuilder.getInnerHit()) for (InnerHitBuilder innerHitBuilder : innerHitBuilders) {
.query(groupQuery); SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder)
SearchRequest groupRequest = new SearchRequest(searchRequest.indices()) .query(groupQuery);
.types(searchRequest.types()) SearchRequest groupRequest = new SearchRequest(searchRequest.indices())
.source(sourceBuilder); .types(searchRequest.types())
multiRequest.add(groupRequest); .source(sourceBuilder);
multiRequest.add(groupRequest);
}
} }
context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(), context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(),
ActionListener.wrap(response -> { ActionListener.wrap(response -> {
Iterator<MultiSearchResponse.Item> it = response.iterator(); Iterator<MultiSearchResponse.Item> it = response.iterator();
for (SearchHit hit : searchResponse.getHits()) { for (SearchHit hit : searchResponse.getHits()) {
MultiSearchResponse.Item item = it.next(); for (InnerHitBuilder innerHitBuilder : innerHitBuilders) {
if (item.isFailure()) { MultiSearchResponse.Item item = it.next();
context.onPhaseFailure(this, "failed to expand hits", item.getFailure()); if (item.isFailure()) {
return; context.onPhaseFailure(this, "failed to expand hits", item.getFailure());
return;
}
SearchHits innerHits = item.getResponse().getHits();
if (hit.getInnerHits() == null) {
hit.setInnerHits(new HashMap<>(innerHitBuilders.size()));
}
hit.getInnerHits().put(innerHitBuilder.getName(), innerHits);
} }
SearchHits innerHits = item.getResponse().getHits();
if (hit.getInnerHits() == null) {
hit.setInnerHits(new HashMap<>(1));
}
hit.getInnerHits().put(collapseBuilder.getInnerHit().getName(), innerHits);
} }
context.executeNextPhase(this, nextPhaseFactory.apply(searchResponse)); context.executeNextPhase(this, nextPhaseFactory.apply(searchResponse));
}, context::onFailure) }, context::onFailure)

View File

@ -22,13 +22,18 @@ import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.AbstractObjectParser;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper;
@ -38,12 +43,16 @@ import org.elasticsearch.search.SearchContextException;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.function.BiConsumer;
/** /**
* A builder that enables field collapsing on search request. * A builder that enables field collapsing on search request.
*/ */
public class CollapseBuilder extends ToXContentToBytes implements Writeable { public class CollapseBuilder implements Writeable, ToXContentObject {
public static final ParseField FIELD_FIELD = new ParseField("field"); public static final ParseField FIELD_FIELD = new ParseField("field");
public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits"); public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
public static final ParseField MAX_CONCURRENT_GROUP_REQUESTS_FIELD = new ParseField("max_concurrent_group_searches"); public static final ParseField MAX_CONCURRENT_GROUP_REQUESTS_FIELD = new ParseField("max_concurrent_group_searches");
@ -53,12 +62,27 @@ public class CollapseBuilder extends ToXContentToBytes implements Writeable {
static { static {
PARSER.declareString(CollapseBuilder::setField, FIELD_FIELD); PARSER.declareString(CollapseBuilder::setField, FIELD_FIELD);
PARSER.declareInt(CollapseBuilder::setMaxConcurrentGroupRequests, MAX_CONCURRENT_GROUP_REQUESTS_FIELD); PARSER.declareInt(CollapseBuilder::setMaxConcurrentGroupRequests, MAX_CONCURRENT_GROUP_REQUESTS_FIELD);
PARSER.declareObject(CollapseBuilder::setInnerHits, PARSER.declareField((parser, builder, context) -> {
(p, c) -> InnerHitBuilder.fromXContent(c), INNER_HITS_FIELD); XContentParser.Token currentToken = parser.currentToken();
if (currentToken == XContentParser.Token.START_OBJECT) {
builder.setInnerHits(InnerHitBuilder.fromXContent(context));
} else if (currentToken == XContentParser.Token.START_ARRAY) {
List<InnerHitBuilder> innerHitBuilders = new ArrayList<>();
for (currentToken = parser.nextToken(); currentToken != XContentParser.Token.END_ARRAY; currentToken = parser.nextToken()) {
if (currentToken == XContentParser.Token.START_OBJECT) {
innerHitBuilders.add(InnerHitBuilder.fromXContent(context));
} else {
throw new ParsingException(parser.getTokenLocation(), "Invalid token in inner_hits array");
}
}
builder.setInnerHits(innerHitBuilders);
}
}, INNER_HITS_FIELD, ObjectParser.ValueType.OBJECT_ARRAY);
} }
private String field; private String field;
private InnerHitBuilder innerHit; private List<InnerHitBuilder> innerHits = Collections.emptyList();
private int maxConcurrentGroupRequests = 0; private int maxConcurrentGroupRequests = 0;
private CollapseBuilder() {} private CollapseBuilder() {}
@ -75,22 +99,35 @@ public class CollapseBuilder extends ToXContentToBytes implements Writeable {
public CollapseBuilder(StreamInput in) throws IOException { public CollapseBuilder(StreamInput in) throws IOException {
this.field = in.readString(); this.field = in.readString();
this.maxConcurrentGroupRequests = in.readVInt(); this.maxConcurrentGroupRequests = in.readVInt();
this.innerHit = in.readOptionalWriteable(InnerHitBuilder::new); if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) {
this.innerHits = in.readList(InnerHitBuilder::new);
} else {
InnerHitBuilder innerHitBuilder = in.readOptionalWriteable(InnerHitBuilder::new);
if (innerHitBuilder != null) {
this.innerHits = Collections.singletonList(innerHitBuilder);
} else {
this.innerHits = Collections.emptyList();
}
}
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeString(field); out.writeString(field);
out.writeVInt(maxConcurrentGroupRequests); out.writeVInt(maxConcurrentGroupRequests);
if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) { if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) {
final boolean hasInnerHit = innerHit != null; out.writeList(innerHits);
} else {
boolean hasInnerHit = innerHits.isEmpty() == false;
out.writeBoolean(hasInnerHit); out.writeBoolean(hasInnerHit);
if (hasInnerHit) { if (hasInnerHit) {
innerHit.writeToCollapseBWC(out); if (out.getVersion().before(Version.V_5_5_0_UNRELEASED)) {
innerHits.get(0).writeToCollapseBWC(out);
} else {
innerHits.get(0).writeTo(out);
}
} }
} else { }
out.writeOptionalWriteable(innerHit);
}
} }
public static CollapseBuilder fromXContent(QueryParseContext context) throws IOException { public static CollapseBuilder fromXContent(QueryParseContext context) throws IOException {
@ -108,7 +145,12 @@ public class CollapseBuilder extends ToXContentToBytes implements Writeable {
} }
public CollapseBuilder setInnerHits(InnerHitBuilder innerHit) { public CollapseBuilder setInnerHits(InnerHitBuilder innerHit) {
this.innerHit = innerHit; this.innerHits = Collections.singletonList(innerHit);
return this;
}
public CollapseBuilder setInnerHits(List<InnerHitBuilder> innerHits) {
this.innerHits = innerHits;
return this; return this;
} }
@ -130,8 +172,8 @@ public class CollapseBuilder extends ToXContentToBytes implements Writeable {
/** /**
* The inner hit options to expand the collapsed results * The inner hit options to expand the collapsed results
*/ */
public InnerHitBuilder getInnerHit() { public List<InnerHitBuilder> getInnerHits() {
return this.innerHit; return this.innerHits;
} }
/** /**
@ -154,8 +196,16 @@ public class CollapseBuilder extends ToXContentToBytes implements Writeable {
if (maxConcurrentGroupRequests > 0) { if (maxConcurrentGroupRequests > 0) {
builder.field(MAX_CONCURRENT_GROUP_REQUESTS_FIELD.getPreferredName(), maxConcurrentGroupRequests); builder.field(MAX_CONCURRENT_GROUP_REQUESTS_FIELD.getPreferredName(), maxConcurrentGroupRequests);
} }
if (innerHit != null) { if (innerHits.isEmpty() == false) {
builder.field(INNER_HITS_FIELD.getPreferredName(), innerHit); if (innerHits.size() == 1) {
builder.field(INNER_HITS_FIELD.getPreferredName(), innerHits.get(0));
} else {
builder.startArray(INNER_HITS_FIELD.getPreferredName());
for (InnerHitBuilder innerHit : innerHits) {
innerHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
}
builder.endArray();
}
} }
} }
@ -168,14 +218,12 @@ public class CollapseBuilder extends ToXContentToBytes implements Writeable {
if (maxConcurrentGroupRequests != that.maxConcurrentGroupRequests) return false; if (maxConcurrentGroupRequests != that.maxConcurrentGroupRequests) return false;
if (!field.equals(that.field)) return false; if (!field.equals(that.field)) return false;
return innerHit != null ? innerHit.equals(that.innerHit) : that.innerHit == null; return Objects.equals(innerHits, that.innerHits);
} }
@Override @Override
public int hashCode() { public int hashCode() {
int result = field.hashCode(); int result = Objects.hash(field, innerHits);
result = 31 * result + (innerHit != null ? innerHit.hashCode() : 0);
result = 31 * result + maxConcurrentGroupRequests; result = 31 * result + maxConcurrentGroupRequests;
return result; return result;
} }
@ -204,10 +252,11 @@ public class CollapseBuilder extends ToXContentToBytes implements Writeable {
if (fieldType.hasDocValues() == false) { if (fieldType.hasDocValues() == false) {
throw new SearchContextException(context, "cannot collapse on field `" + field + "` without `doc_values`"); throw new SearchContextException(context, "cannot collapse on field `" + field + "` without `doc_values`");
} }
if (fieldType.indexOptions() == IndexOptions.NONE && innerHit != null) { if (fieldType.indexOptions() == IndexOptions.NONE && (innerHits != null && !innerHits.isEmpty())) {
throw new SearchContextException(context, "cannot expand `inner_hits` for collapse field `" throw new SearchContextException(context, "cannot expand `inner_hits` for collapse field `"
+ field + "`, " + "only indexed field can retrieve `inner_hits`"); + field + "`, " + "only indexed field can retrieve `inner_hits`");
} }
return new CollapseContext(fieldType, innerHit);
return new CollapseContext(fieldType, innerHits);
} }
} }

View File

@ -26,17 +26,24 @@ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.InnerHitBuilder;
import java.io.IOException; import java.io.IOException;
import java.util.Collections;
import java.util.List;
/** /**
* Context used for field collapsing * Context used for field collapsing
*/ */
public class CollapseContext { public class CollapseContext {
private final MappedFieldType fieldType; private final MappedFieldType fieldType;
private final InnerHitBuilder innerHit; private final List<InnerHitBuilder> innerHits;
public CollapseContext(MappedFieldType fieldType, InnerHitBuilder innerHit) { public CollapseContext(MappedFieldType fieldType, InnerHitBuilder innerHit) {
this.fieldType = fieldType; this.fieldType = fieldType;
this.innerHit = innerHit; this.innerHits = Collections.singletonList(innerHit);
}
public CollapseContext(MappedFieldType fieldType, List<InnerHitBuilder> innerHits) {
this.fieldType = fieldType;
this.innerHits = innerHits;
} }
/** The field type used for collapsing **/ /** The field type used for collapsing **/
@ -44,10 +51,9 @@ public class CollapseContext {
return fieldType; return fieldType;
} }
/** The inner hit options to expand the collapsed results **/ /** The inner hit options to expand the collapsed results **/
public InnerHitBuilder getInnerHit() { public List<InnerHitBuilder> getInnerHit() {
return innerHit; return innerHits;
} }
public CollapsingTopDocsCollector<?> createTopDocs(Sort sort, int topN, boolean trackMaxScore) throws IOException { public CollapsingTopDocsCollector<?> createTopDocs(Sort sort, int topN, boolean trackMaxScore) throws IOException {

View File

@ -36,25 +36,38 @@ import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers; import org.hamcrest.Matchers;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class ExpandSearchPhaseTests extends ESTestCase { public class ExpandSearchPhaseTests extends ESTestCase {
public void testCollapseSingleHit() throws IOException { public void testCollapseSingleHit() throws IOException {
final int iters = randomIntBetween(5, 10); final int iters = randomIntBetween(5, 10);
for (int i = 0; i < iters; i++) { for (int i = 0; i < iters; i++) {
SearchHits collapsedHits = new SearchHits(new SearchHit[]{new SearchHit(2, "ID", new Text("type"), final int numInnerHits = randomIntBetween(1, 5);
Collections.emptyMap()), new SearchHit(3, "ID", new Text("type"), List<SearchHits> collapsedHits = new ArrayList<>(numInnerHits);
Collections.emptyMap())}, 1, 1.0F); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(innerHitNum, "ID", new Text("type"),
Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", new Text("type"),
Collections.emptyMap())}, 2, 1.0F);
collapsedHits.add(hits);
}
AtomicBoolean executedMultiSearch = new AtomicBoolean(false); AtomicBoolean executedMultiSearch = new AtomicBoolean(false);
QueryBuilder originalQuery = randomBoolean() ? null : QueryBuilders.termQuery("foo", "bar"); QueryBuilder originalQuery = randomBoolean() ? null : QueryBuilders.termQuery("foo", "bar");
MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); final MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1);
String collapseValue = randomBoolean() ? null : "boom"; String collapseValue = randomBoolean() ? null : "boom";
mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder() mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder()
.collapse(new CollapseBuilder("someField").setInnerHits(new InnerHitBuilder().setName("foobarbaz")))); .collapse(new CollapseBuilder("someField")
.setInnerHits(IntStream.range(0, numInnerHits).mapToObj(hitNum -> new InnerHitBuilder().setName("innerHit" + hitNum))
.collect(Collectors.toList()))));
mockSearchPhaseContext.getRequest().source().query(originalQuery); mockSearchPhaseContext.getRequest().source().query(originalQuery);
mockSearchPhaseContext.searchTransport = new SearchTransportService( mockSearchPhaseContext.searchTransport = new SearchTransportService(
Settings.builder().put("search.remote.connect", false).build(), null) { Settings.builder().put("search.remote.connect", false).build(), null) {
@ -62,9 +75,10 @@ public class ExpandSearchPhaseTests extends ESTestCase {
@Override @Override
void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener<MultiSearchResponse> listener) { void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener<MultiSearchResponse> listener) {
assertTrue(executedMultiSearch.compareAndSet(false, true)); assertTrue(executedMultiSearch.compareAndSet(false, true));
assertEquals(1, request.requests().size()); assertEquals(numInnerHits, request.requests().size());
SearchRequest searchRequest = request.requests().get(0); SearchRequest searchRequest = request.requests().get(0);
assertTrue(searchRequest.source().query() instanceof BoolQueryBuilder); assertTrue(searchRequest.source().query() instanceof BoolQueryBuilder);
BoolQueryBuilder groupBuilder = (BoolQueryBuilder) searchRequest.source().query(); BoolQueryBuilder groupBuilder = (BoolQueryBuilder) searchRequest.source().query();
if (collapseValue == null) { if (collapseValue == null) {
assertThat(groupBuilder.mustNot(), Matchers.contains(QueryBuilders.existsQuery("someField"))); assertThat(groupBuilder.mustNot(), Matchers.contains(QueryBuilders.existsQuery("someField")));
@ -78,13 +92,15 @@ public class ExpandSearchPhaseTests extends ESTestCase {
assertArrayEquals(mockSearchPhaseContext.getRequest().types(), searchRequest.types()); assertArrayEquals(mockSearchPhaseContext.getRequest().types(), searchRequest.types());
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(collapsedHits, List<MultiSearchResponse.Item> mSearchResponses = new ArrayList<>(numInnerHits);
null, null, null, false, null, 1); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
SearchResponse response = mockSearchPhaseContext.buildSearchResponse(internalSearchResponse, null); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(collapsedHits.get(innerHitNum),
listener.onResponse(new MultiSearchResponse(new MultiSearchResponse.Item[]{ null, null, null, false, null, 1);
new MultiSearchResponse.Item(response, null) SearchResponse response = mockSearchPhaseContext.buildSearchResponse(internalSearchResponse, null);
})); mSearchResponses.add(new MultiSearchResponse.Item(response, null));
}
listener.onResponse(new MultiSearchResponse(mSearchResponses.toArray(new MultiSearchResponse.Item[0])));
} }
}; };
@ -108,8 +124,12 @@ public class ExpandSearchPhaseTests extends ESTestCase {
assertNotNull(reference.get()); assertNotNull(reference.get());
SearchResponse theResponse = reference.get(); SearchResponse theResponse = reference.get();
assertSame(theResponse, response); assertSame(theResponse, response);
assertEquals(1, theResponse.getHits().getHits()[0].getInnerHits().size()); assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size());
assertSame(theResponse.getHits().getHits()[0].getInnerHits().get("foobarbaz"), collapsedHits);
for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
assertSame(theResponse.getHits().getHits()[0].getInnerHits().get("innerHit" + innerHitNum), collapsedHits.get(innerHitNum));
}
assertTrue(executedMultiSearch.get()); assertTrue(executedMultiSearch.get());
assertEquals(1, mockSearchPhaseContext.phasesExecuted.get()); assertEquals(1, mockSearchPhaseContext.phasesExecuted.get());
} }

View File

@ -26,30 +26,38 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitBuilderTests; import org.elasticsearch.index.query.InnerHitBuilderTests;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchContextException; import org.elasticsearch.search.SearchContextException;
import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.AbstractSerializingTestCase;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static java.util.Collections.emptyList; import static java.util.Collections.emptyList;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
public class CollapseBuilderTests extends AbstractWireSerializingTestCase { public class CollapseBuilderTests extends AbstractSerializingTestCase<CollapseBuilder> {
private static NamedWriteableRegistry namedWriteableRegistry; private static NamedWriteableRegistry namedWriteableRegistry;
private static NamedXContentRegistry xContentRegistry; private static NamedXContentRegistry xContentRegistry;
@ -67,17 +75,30 @@ public class CollapseBuilderTests extends AbstractWireSerializingTestCase {
} }
public static CollapseBuilder randomCollapseBuilder() { public static CollapseBuilder randomCollapseBuilder() {
return randomCollapseBuilder(true);
}
public static CollapseBuilder randomCollapseBuilder(boolean multiInnerHits) {
CollapseBuilder builder = new CollapseBuilder(randomAlphaOfLength(10)); CollapseBuilder builder = new CollapseBuilder(randomAlphaOfLength(10));
builder.setMaxConcurrentGroupRequests(randomIntBetween(1, 48)); builder.setMaxConcurrentGroupRequests(randomIntBetween(1, 48));
if (randomBoolean()) { int numInnerHits = randomIntBetween(0, multiInnerHits ? 5 : 1);
if (numInnerHits == 1) {
InnerHitBuilder innerHit = InnerHitBuilderTests.randomInnerHits(); InnerHitBuilder innerHit = InnerHitBuilderTests.randomInnerHits();
builder.setInnerHits(innerHit); builder.setInnerHits(innerHit);
} else if (numInnerHits > 1) {
List<InnerHitBuilder> innerHits = new ArrayList<>(numInnerHits);
for (int i = 0; i < numInnerHits; i++) {
innerHits.add(InnerHitBuilderTests.randomInnerHits());
}
builder.setInnerHits(innerHits);
} }
return builder; return builder;
} }
@Override @Override
protected Writeable createTestInstance() { protected CollapseBuilder createTestInstance() {
return randomCollapseBuilder(); return randomCollapseBuilder();
} }
@ -177,4 +198,26 @@ public class CollapseBuilderTests extends AbstractWireSerializingTestCase {
assertEquals(exc.getMessage(), "unknown type for collapse field `field`, only keywords and numbers are accepted"); assertEquals(exc.getMessage(), "unknown type for collapse field `field`, only keywords and numbers are accepted");
} }
} }
@Override
protected CollapseBuilder doParseInstance(XContentParser parser) throws IOException {
return CollapseBuilder.fromXContent(new QueryParseContext(parser));
}
/**
* Rewrite this test to disable xcontent shuffling on the highlight builder
*/
public void testFromXContent() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
CollapseBuilder testInstance = createTestInstance();
XContentType xContentType = randomFrom(XContentType.values());
XContentBuilder builder = toXContent(testInstance, xContentType);
XContentBuilder shuffled = shuffleXContent(builder, "fields");
assertParsedInstance(xContentType, shuffled.bytes(), testInstance);
for (Map.Entry<String, CollapseBuilder> alternateVersion : getAlternateVersions().entrySet()) {
String instanceAsString = alternateVersion.getKey();
assertParsedInstance(XContentType.JSON, new BytesArray(instanceAsString), alternateVersion.getValue());
}
}
}
} }

View File

@ -70,8 +70,46 @@ GET /twitter/tweet/_search
See <<search-request-inner-hits, inner hits>> for the complete list of supported options and the format of the response. See <<search-request-inner-hits, inner hits>> for the complete list of supported options and the format of the response.
It is also possible to request multiple `inner_hits` for each collapsed hit. This can be useful when you want to get
multiple representations of the collapsed hits.
[source,js]
--------------------------------------------------
GET /twitter/tweet/_search
{
"query": {
"match": {
"message": "elasticsearch"
}
},
"collapse" : {
"field" : "user", <1>
"inner_hits": [
{
"name": "most_liked", <2>
"size": 3,
"sort": ["likes"]
},
{
"name": "most_recent", <3>
"size": 3,
"sort": [{ "date": "asc" }]
}
]
},
"sort": ["likes"]
}
--------------------------------------------------
// CONSOLE
// TEST[setup:twitter]
<1> collapse the result set using the "user" field
<2> return the three most liked tweets for the user
<3> return the three most recent tweets for the user
The expansion of the group is done by sending an additional query for each The expansion of the group is done by sending an additional query for each
collapsed hit returned in the response. `inner_hit` request for each collapsed hit returned in the response. This can significantly slow things down
if you have too many groups and/or `inner_hit` requests.
The `max_concurrent_group_searches` request parameter can be used to control The `max_concurrent_group_searches` request parameter can be used to control
the maximum number of concurrent searches allowed in this phase. the maximum number of concurrent searches allowed in this phase.
The default is based on the number of data nodes and the default search thread pool size. The default is based on the number of data nodes and the default search thread pool size.

View File

@ -107,8 +107,8 @@ setup:
"field collapsing and inner_hits": "field collapsing and inner_hits":
- skip: - skip:
version: " - 5.2.99" version: " - 5.99.99"
reason: this uses a new API that has been added in 5.3 reason: disable this test temporarily due to a pending backport (#24517)
- do: - do:
search: search:
@ -265,3 +265,62 @@ setup:
- match: { hits.total: 6 } - match: { hits.total: 6 }
- length: { hits.hits: 0 } - length: { hits.hits: 0 }
---
"field collapsing and multiple inner_hits":
- skip:
version: " - 5.99.99"
reason: TODO version should be 5.4.99 after backport (#24517)
- do:
search:
index: test
type: test
body:
collapse: {
field: numeric_group,
inner_hits: [
{ name: sub_hits_asc, size: 2, sort: [{ sort: asc }] },
{ name: sub_hits_desc, size: 1, sort: [{ sort: desc }] }
]
}
sort: [{ sort: desc }]
- match: { hits.total: 6 }
- length: { hits.hits: 3 }
- match: { hits.hits.0._index: test }
- match: { hits.hits.0._type: test }
- match: { hits.hits.0.fields.numeric_group: [3] }
- match: { hits.hits.0.sort: [36] }
- match: { hits.hits.0._id: "6" }
- match: { hits.hits.0.inner_hits.sub_hits_asc.hits.total: 1 }
- length: { hits.hits.0.inner_hits.sub_hits_asc.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits_asc.hits.hits.0._id: "6" }
- match: { hits.hits.0.inner_hits.sub_hits_desc.hits.total: 1 }
- length: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits: 1 }
- match: { hits.hits.0.inner_hits.sub_hits_desc.hits.hits.0._id: "6" }
- match: { hits.hits.1._index: test }
- match: { hits.hits.1._type: test }
- match: { hits.hits.1.fields.numeric_group: [1] }
- match: { hits.hits.1.sort: [24] }
- match: { hits.hits.1._id: "3" }
- match: { hits.hits.1.inner_hits.sub_hits_asc.hits.total: 3 }
- length: { hits.hits.1.inner_hits.sub_hits_asc.hits.hits: 2 }
- match: { hits.hits.1.inner_hits.sub_hits_asc.hits.hits.0._id: "2" }
- match: { hits.hits.1.inner_hits.sub_hits_asc.hits.hits.1._id: "1" }
- match: { hits.hits.1.inner_hits.sub_hits_desc.hits.total: 3 }
- length: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits: 1 }
- match: { hits.hits.1.inner_hits.sub_hits_desc.hits.hits.0._id: "3" }
- match: { hits.hits.2._index: test }
- match: { hits.hits.2._type: test }
- match: { hits.hits.2.fields.numeric_group: [25] }
- match: { hits.hits.2.sort: [10] }
- match: { hits.hits.2._id: "4" }
- match: { hits.hits.2.inner_hits.sub_hits_asc.hits.total: 2 }
- length: { hits.hits.2.inner_hits.sub_hits_asc.hits.hits: 2 }
- match: { hits.hits.2.inner_hits.sub_hits_asc.hits.hits.0._id: "5" }
- match: { hits.hits.2.inner_hits.sub_hits_asc.hits.hits.1._id: "4" }
- match: { hits.hits.2.inner_hits.sub_hits_desc.hits.total: 2 }
- length: { hits.hits.2.inner_hits.sub_hits_desc.hits.hits: 1 }
- match: { hits.hits.2.inner_hits.sub_hits_desc.hits.hits.0._id: "4" }

View File

@ -18,6 +18,7 @@
*/ */
package org.elasticsearch.test; package org.elasticsearch.test;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
@ -93,7 +94,11 @@ public abstract class AbstractWireSerializingTestCase<T extends Writeable> exten
* Serialize the given instance and asserts that both are equal * Serialize the given instance and asserts that both are equal
*/ */
protected T assertSerialization(T testInstance) throws IOException { protected T assertSerialization(T testInstance) throws IOException {
T deserializedInstance = copyInstance(testInstance); return assertSerialization(testInstance, Version.CURRENT);
}
protected T assertSerialization(T testInstance, Version version) throws IOException {
T deserializedInstance = copyInstance(testInstance, version);
assertEquals(testInstance, deserializedInstance); assertEquals(testInstance, deserializedInstance);
assertEquals(testInstance.hashCode(), deserializedInstance.hashCode()); assertEquals(testInstance.hashCode(), deserializedInstance.hashCode());
assertNotSame(testInstance, deserializedInstance); assertNotSame(testInstance, deserializedInstance);
@ -101,10 +106,16 @@ public abstract class AbstractWireSerializingTestCase<T extends Writeable> exten
} }
protected T copyInstance(T instance) throws IOException { protected T copyInstance(T instance) throws IOException {
return copyInstance(instance, Version.CURRENT);
}
protected T copyInstance(T instance, Version version) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) { try (BytesStreamOutput output = new BytesStreamOutput()) {
output.setVersion(version);
instance.writeTo(output); instance.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(),
getNamedWriteableRegistry())) { getNamedWriteableRegistry())) {
in.setVersion(version);
return instanceReader().read(in); return instanceReader().read(in);
} }
} }