Introduce ParentIdCollector that collects only if the parent ID is non-null, i.e. if the document has a parent.
Closes #2744
This commit is contained in:
parent
7961dfa7ab
commit
237c4ddf54
|
@ -319,30 +319,25 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
|
|||
}
|
||||
}
|
||||
|
||||
static class ChildUidCollector extends NoopCollector {
|
||||
static class ChildUidCollector extends ParentIdCollector {
|
||||
|
||||
final TObjectFloatHashMap<HashedBytesArray> uidToScore;
|
||||
final ScoreType scoreType;
|
||||
final SearchContext searchContext;
|
||||
final String childType;
|
||||
|
||||
Scorer scorer;
|
||||
IdReaderTypeCache typeCache;
|
||||
|
||||
ChildUidCollector(ScoreType scoreType, SearchContext searchContext, String childType, TObjectFloatHashMap<HashedBytesArray> uidToScore) {
|
||||
super(childType, searchContext);
|
||||
this.uidToScore = uidToScore;
|
||||
this.scoreType = scoreType;
|
||||
this.searchContext = searchContext;
|
||||
this.childType = childType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
if (typeCache == null) {
|
||||
return;
|
||||
}
|
||||
public void setScorer(Scorer scorer) throws IOException {
|
||||
this.scorer = scorer;
|
||||
}
|
||||
|
||||
HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);
|
||||
@Override
|
||||
protected void collect(int doc, HashedBytesArray parentUid) throws IOException {
|
||||
float previousScore = uidToScore.get(parentUid);
|
||||
float currentScore = scorer.score();
|
||||
if (previousScore == 0) {
|
||||
|
@ -357,23 +352,19 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
|
|||
uidToScore.put(parentUid, currentScore);
|
||||
}
|
||||
break;
|
||||
case AVG:
|
||||
assert false : "AVG has it's own collector";
|
||||
|
||||
default:
|
||||
assert false : "Are we missing a sore type here? -- " + scoreType;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setScorer(Scorer scorer) throws IOException {
|
||||
this.scorer = scorer;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setNextReader(AtomicReaderContext context) throws IOException {
|
||||
typeCache = searchContext.idCache().reader(context.reader()).type(childType);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class AvgChildUidCollector extends ChildUidCollector {
|
||||
final static class AvgChildUidCollector extends ChildUidCollector {
|
||||
|
||||
final TObjectIntHashMap<HashedBytesArray> uidToCount;
|
||||
|
||||
|
@ -384,12 +375,7 @@ public class ChildrenQuery extends Query implements SearchContext.Rewrite {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
if (typeCache == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);
|
||||
protected void collect(int doc, HashedBytesArray parentUid) throws IOException {
|
||||
float previousScore = uidToScore.get(parentUid);
|
||||
float currentScore = scorer.score();
|
||||
if (previousScore == 0) {
|
||||
|
|
|
@ -102,7 +102,7 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rew
|
|||
collectedUids = null;
|
||||
}
|
||||
|
||||
static class ParentDocSet extends MatchDocIdSet {
|
||||
final static class ParentDocSet extends MatchDocIdSet {
|
||||
|
||||
final IndexReader reader;
|
||||
final THashSet<HashedBytesArray> parents;
|
||||
|
@ -121,33 +121,19 @@ public abstract class HasChildFilter extends Filter implements SearchContext.Rew
|
|||
}
|
||||
}
|
||||
|
||||
static class UidCollector extends NoopCollector {
|
||||
|
||||
final String parentType;
|
||||
final SearchContext context;
|
||||
final THashSet<HashedBytesArray> collectedUids;
|
||||
|
||||
private IdReaderTypeCache typeCache;
|
||||
final static class UidCollector extends ParentIdCollector {
|
||||
private final THashSet<HashedBytesArray> collectedUids;
|
||||
|
||||
UidCollector(String parentType, SearchContext context, THashSet<HashedBytesArray> collectedUids) {
|
||||
this.parentType = parentType;
|
||||
this.context = context;
|
||||
super(parentType, context);
|
||||
this.collectedUids = collectedUids;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void collect(int doc) throws IOException {
|
||||
// It can happen that for particular segment no document exist for an specific type. This prevents NPE
|
||||
if (typeCache != null) {
|
||||
collectedUids.add(typeCache.parentIdByDoc(doc));
|
||||
}
|
||||
|
||||
public void collect(int doc, HashedBytesArray parentIdByDoc){
|
||||
collectedUids.add(parentIdByDoc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
|
||||
typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
/*
|
||||
* Licensed to ElasticSearch and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. ElasticSearch licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.search.child;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.AtomicReaderContext;
|
||||
import org.elasticsearch.common.bytes.HashedBytesArray;
|
||||
import org.elasticsearch.common.lucene.search.NoopCollector;
|
||||
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
/**
|
||||
* A simple collector that only collects if the docs parent ID is not
|
||||
* <code>null</code>
|
||||
*/
|
||||
abstract class ParentIdCollector extends NoopCollector {
|
||||
protected final String type;
|
||||
protected final SearchContext context;
|
||||
private IdReaderTypeCache typeCache;
|
||||
|
||||
protected ParentIdCollector(String parentType, SearchContext context) {
|
||||
this.type = parentType;
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void collect(int doc) throws IOException {
|
||||
if (typeCache != null) {
|
||||
HashedBytesArray parentIdByDoc = typeCache.parentIdByDoc(doc);
|
||||
if (parentIdByDoc != null) {
|
||||
collect(doc, parentIdByDoc);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void collect(int doc, HashedBytesArray parentId) throws IOException;
|
||||
|
||||
@Override
|
||||
public void setNextReader(AtomicReaderContext readerContext) throws IOException {
|
||||
typeCache = context.idCache().reader(readerContext.reader()).type(type);
|
||||
}
|
||||
}
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.test.integration.search.child;
|
||||
|
||||
import org.elasticsearch.ElasticSearchException;
|
||||
import org.elasticsearch.action.count.CountResponse;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
|
@ -35,6 +36,7 @@ import org.testng.annotations.AfterClass;
|
|||
import org.testng.annotations.BeforeClass;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
@ -110,6 +112,33 @@ public class SimpleChildQuerySearchTests extends AbstractNodesTests {
|
|||
assertThat("Failures " + Arrays.toString(searchResponse.getShardFailures()), searchResponse.getShardFailures().length, equalTo(0));
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
|
||||
}
|
||||
|
||||
|
||||
@Test // see #2744
|
||||
public void test2744() throws ElasticSearchException, IOException {
|
||||
client.admin().indices().prepareDelete().execute().actionGet();
|
||||
|
||||
client.admin().indices().prepareCreate("test")
|
||||
.setSettings(
|
||||
ImmutableSettings.settingsBuilder()
|
||||
.put("index.number_of_shards", 1)
|
||||
.put("index.number_of_replicas", 0)
|
||||
).execute().actionGet();
|
||||
client.admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
|
||||
client.admin().indices().preparePutMapping("test").setType("test").setSource(jsonBuilder().startObject().startObject("type")
|
||||
.startObject("_parent").field("type", "foo").endObject()
|
||||
.endObject().endObject()).execute().actionGet();
|
||||
|
||||
// index simple data
|
||||
client.prepareIndex("test", "foo", "1").setSource("foo", 1).execute().actionGet();
|
||||
client.prepareIndex("test", "test").setSource("foo", 1).setParent("1").execute().actionGet();
|
||||
client.admin().indices().prepareRefresh().execute().actionGet();
|
||||
SearchResponse searchResponse = client.prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1))).execute().actionGet();
|
||||
assertThat(searchResponse.getFailedShards(), equalTo(0));
|
||||
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
|
||||
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void simpleChildQuery() throws Exception {
|
||||
|
|
Loading…
Reference in New Issue