Internal: Remove XCollector.
We don't actually need this interface; we can just run the post-collection operation once collection is finished on the impls that need it. Close #9677
parent 460e8d34fc
commit 9f4c56a7b8
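The pattern this commit moves to: the post-collection hook stays a plain method on the concrete collector types (BucketCollector, Aggregator), and the caller invokes it explicitly once collection is finished, instead of probing for a marker interface with instanceof. Below is a minimal, self-contained sketch of that calling convention against the stock Lucene Collector API; CountingCollector and countAll are made-up names for illustration (not part of this change), and the sketch assumes the Lucene 5-era API (needsScores) used elsewhere in this diff.

    import java.io.IOException;

    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.SimpleCollector;

    // Illustrative collector: the post-collection hook is just a method on the
    // concrete type, so no shared XCollector-style marker interface is needed.
    class CountingCollector extends SimpleCollector {

        long count;
        boolean finished;

        @Override
        public void collect(int doc) throws IOException {
            count++;
        }

        @Override
        public boolean needsScores() {
            return false;
        }

        // Callers run this once collection is finished.
        public void postCollection() {
            finished = true;
        }

        static long countAll(IndexSearcher searcher) throws IOException {
            CountingCollector collector = new CountingCollector();
            searcher.search(new MatchAllDocsQuery(), collector);
            collector.postCollection(); // explicit call, no instanceof check
            return collector.count;
        }
    }

In the hunks below the instanceof XCollector checks disappear, and the remaining postCollection() calls are made directly on the concrete types that define them (see the AggregationPhase hunk).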
@@ -28,7 +28,7 @@ import java.io.IOException;
 /**
  *
  */
-public class FilteredCollector implements XCollector {
+public class FilteredCollector implements Collector {

     private final Collector collector;
     private final Filter filter;
@@ -38,59 +38,15 @@ public class FilteredCollector implements XCollector {
         this.filter = filter;
     }

     @Override
-    public void postCollection() throws IOException {
-        if (collector instanceof XCollector) {
-            ((XCollector) collector).postCollection();
-        }
-    }
-
-    @Override
     public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
         final DocIdSet set = filter.getDocIdSet(context, null);
         final LeafCollector in = collector.getLeafCollector(context);
-        final Bits bits = set == null ? null : set.bits();
-
-        if (bits != null) {
-            // the filter supports random-access
-            return new FilterLeafCollector(in) {
-                public void collect(int doc) throws IOException {
-                    if (bits.get(doc)) {
-                        in.collect(doc);
-                    }
-                }
-            };
-        }
-
-        // No random-access support, use the iterator and force in-order scoring
-        final DocIdSetIterator iterator;
-        if (DocIdSets.isEmpty(set)) {
-            iterator = null;
-        } else {
-            // DIS.iterator might still return null here
-            iterator = set.iterator();
-        }
-
-        if (iterator == null) {
-            return new FilterLeafCollector(in) {
-                @Override
-                public void collect(int doc) throws IOException {
-                    // no-op
-                }
-            };
-        }
+        final Bits bits = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), set);

         return new FilterLeafCollector(in) {
             @Override
             public void collect(int doc) throws IOException {
-                final int itDoc = iterator.docID();
-                if (itDoc > doc) {
-                    return;
-                } else if (itDoc < doc) {
-                    if (iterator.advance(doc) == doc) {
-                        in.collect(doc);
-                    }
-                } else {
+                if (bits.get(doc)) {
                     in.collect(doc);
                 }
-            }
@@ -1,32 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.common.lucene.search;
-
-import org.apache.lucene.search.Collector;
-
-import java.io.IOException;
-
-/**
- * An extension to {@link Collector} that allows for a callback when
- * collection is done.
- */
-public interface XCollector extends Collector {
-
-    public void postCollection() throws IOException;
-}
@@ -19,11 +19,18 @@
 package org.elasticsearch.percolator;

 import com.carrotsearch.hppc.ByteObjectOpenHashMap;

 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.memory.ExtendedMemoryIndex;
 import org.apache.lucene.index.memory.MemoryIndex;
-import org.apache.lucene.search.*;
+import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.FilteredQuery;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.CloseableThreadLocal;
 import org.elasticsearch.ElasticsearchException;
@@ -42,7 +49,6 @@ import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.BytesText;
 import org.elasticsearch.common.text.StringText;
@@ -68,7 +74,10 @@ import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.percolator.QueryCollector.*;
+import org.elasticsearch.percolator.QueryCollector.Count;
+import org.elasticsearch.percolator.QueryCollector.Match;
+import org.elasticsearch.percolator.QueryCollector.MatchAndScore;
+import org.elasticsearch.percolator.QueryCollector.MatchAndSort;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchShardTarget;
@@ -86,7 +95,9 @@ import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.index.mapper.SourceToParse.source;
-import static org.elasticsearch.percolator.QueryCollector.*;
+import static org.elasticsearch.percolator.QueryCollector.count;
+import static org.elasticsearch.percolator.QueryCollector.match;
+import static org.elasticsearch.percolator.QueryCollector.matchAndScore;

 /**
  */
@@ -31,7 +31,6 @@ import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.lucene.Lucene;
-import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.index.mapper.FieldMapper;
@@ -66,7 +65,7 @@ abstract class QueryCollector extends SimpleCollector {

     SortedBinaryDocValues values;

-    final XCollector aggregatorCollector;
+    final BucketCollector aggregatorCollector;
     LeafCollector aggregatorLeafCollector;

     QueryCollector(ESLogger logger, PercolateContext context, boolean isNestedDoc) throws IOException {
@@ -27,7 +27,6 @@ import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchPhase;
 import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator;
@@ -114,7 +113,7 @@ public class AggregationPhase implements SearchPhase {

         // optimize the global collector based execution
         if (!globals.isEmpty()) {
-            XCollector collector = BucketCollector.wrap(globals);
+            BucketCollector collector = BucketCollector.wrap(globals);
             Query query = new ConstantScoreQuery(Queries.MATCH_ALL_FILTER);
             Filter searchFilter = context.searchFilter(context.types());
             if (searchFilter != null) {
@@ -122,7 +121,6 @@ public class AggregationPhase implements SearchPhase {
             }
             try {
                 context.searcher().search(query, collector);
-                collector.postCollection();
             } catch (Exception e) {
                 throw new QueryPhaseExecutionException(context, "Failed to execute global aggregators", e);
             }
@@ -131,6 +129,7 @@ public class AggregationPhase implements SearchPhase {
         List<InternalAggregation> aggregations = new ArrayList<>(aggregators.length);
         for (Aggregator aggregator : context.aggregations().aggregators()) {
             try {
+                aggregator.postCollection();
                 aggregations.add(aggregator.buildAggregation(0));
             } catch (IOException e) {
                 throw new AggregationExecutionException("Failed to build aggregation [" + aggregator.name() + "]", e);
@@ -23,7 +23,7 @@ package org.elasticsearch.search.aggregations;
 import com.google.common.collect.Iterables;

 import org.apache.lucene.index.LeafReaderContext;
-import org.elasticsearch.common.lucene.search.XCollector;
+import org.apache.lucene.search.Collector;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -32,18 +32,7 @@ import java.util.List;
 /**
  * A Collector that can collect data in separate buckets.
  */
-public abstract class BucketCollector implements XCollector {
-
-    /**
-     * Used to gather a summary from a bucket
-     */
-    public interface BucketAnalysisCollector{
-        /**
-         * Used to ask {@link BucketCollector}s for their analysis of the content collected in a bucket
-         * @param analysis an object that represents the summary of a bucket as an {@link Aggregation}
-         */
-        void add(Aggregation aggregation);
-    }
+public abstract class BucketCollector implements Collector {

     public final static BucketCollector NO_OP_COLLECTOR = new BucketCollector() {

@@ -120,7 +109,9 @@ public abstract class BucketCollector implements XCollector {
      */
     public abstract void preCollection() throws IOException;

-    @Override
+    /**
+     * Post-collection callback.
+     */
     public abstract void postCollection() throws IOException;

 }
@@ -32,7 +32,6 @@ import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.MinimumScoreCollector;
 import org.elasticsearch.common.lucene.search.FilteredCollector;
-import org.elasticsearch.common.lucene.search.XCollector;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.search.dfs.CachedDfSource;
 import org.elasticsearch.search.internal.SearchContext.Lifetime;
@@ -182,16 +181,6 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
             } else {
                 super.search(leaves, weight, collector);
             }
-
-            if (currentState == Stage.MAIN_QUERY) {
-                if (queryCollectors != null && !queryCollectors.isEmpty()) {
-                    for (Collector queryCollector : queryCollectors.values()) {
-                        if (queryCollector instanceof XCollector) {
-                            ((XCollector) queryCollector).postCollection();
-                        }
-                    }
-                }
-            }
         } finally {
             searchContext.clearReleasables(Lifetime.COLLECTION);
         }