Replace static FacetsCollector#search methods (#13733)

We have a few public static utility search methods in FacetsCollector that accept
a Collector as last argument. In practice, these are expected to be called
providing a `FacetsCollector` as last argument. Also, we'd like to remove all
the search methods that take a `Collector` in favour of those that take a
`CollectorManager` (see #12892).

This commit adds the corresponding functionality to `FacetsCollectorManager`.
The new methods take a `FacetsCollectorManager` as last argument. The return type
has been adapted to also include the facets results that were previously made
available through the collector argument.

In order for tests to all work I had to add support for `keepScores` to
`FacetsCollectorManager` which was missing.

Closes #13725
This commit is contained in:
Luca Cavanna 2024-09-06 22:46:59 +02:00 committed by GitHub
parent 0ec453d485
commit 47c0a6ed18
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 266 additions and 184 deletions

View File

@ -114,6 +114,9 @@ API Changes
* GITHUB#13708: Move Operations.sameLanguage/subsetOf to test-framework. (Robert Muir)
* GITHUB#13733: Move FacetsCollector#search utility methods to `FacetsCollectorManager`, replace the `Collector`
argument with a `FacetsCollectorManager`, and update the return type to include both `TopDocs` results and
facets results. (Luca Cavanna)
New Features
---------------------

View File

@ -793,6 +793,13 @@ Specifically, the method `FunctionValues#getScorer(Weight weight, LeafReaderCont
Callers must now keep track of the Weight instance that created the Scorer if they need it, instead of relying on
Scorer.
### `FacetsCollector#search` utility methods moved and updated
The static `search` methods exposed by `FacetsCollector` have been moved to `FacetsCollectorManager`.
Furthermore, they take a `FacetsCollectorManager` as last argument in place of a `Collector`, so that they support
intra-query concurrency. The return type has also been updated to `FacetsCollectorManager.FacetsResult`, which includes
both `TopDocs` and the facets results, made available through a reduced `FacetsCollector` instance.
### `SearchWithCollectorTask` no longer supports the `collector.class` config parameter
`collector.class` used to allow users to load a custom collector implementation. `collector.manager.class`

View File

@ -25,6 +25,7 @@ import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollectorManager;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.LabelAndValue;
import org.apache.lucene.facet.taxonomy.AssociationAggregationFunction;
@ -97,12 +98,13 @@ public class AssociationsFacetsExample {
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
FacetsCollector fc = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollectorManager.FacetsResult facetsResult =
FacetsCollectorManager.search(
searcher, new MatchAllDocsQuery(), 10, new FacetsCollectorManager());
FacetsCollector fc = facetsResult.facetsCollector();
Facets tags =
new TaxonomyFacetIntAssociations(
@ -133,8 +135,8 @@ public class AssociationsFacetsExample {
// Now user drills down on Publish Date/2010:
q.add("tags", "solr");
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(searcher, q, 10, fc);
FacetsCollectorManager fcm = new FacetsCollectorManager();
FacetsCollector fc = FacetsCollectorManager.search(searcher, q, 10, fcm).facetsCollector();
// Retrieve results
Facets facets =

View File

@ -30,6 +30,7 @@ import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollectorManager;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.AssociationAggregationFunction;
import org.apache.lucene.facet.taxonomy.TaxonomyFacetFloatAssociations;
@ -97,12 +98,13 @@ public class ExpressionAggregationFacetsExample {
DoubleValuesSource.fromLongField("popularity")); // the value of the 'popularity' field
// Aggregates the facet values
FacetsCollector fc = new FacetsCollector(true);
FacetsCollectorManager fcm = new FacetsCollectorManager(true);
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(searcher, new MatchAllDocsQuery(), 10, fcm).facetsCollector();
// Retrieve results
Facets facets =

View File

@ -25,6 +25,7 @@ import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollectorManager;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
@ -97,12 +98,13 @@ public class MultiCategoryListsFacetsExample {
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
FacetsCollector fc = new FacetsCollector();
FacetsCollectorManager fcm = new FacetsCollectorManager();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(searcher, new MatchAllDocsQuery(), 10, fcm).facetsCollector();
// Retrieve results
List<FacetResult> results = new ArrayList<>();

View File

@ -30,6 +30,7 @@ import org.apache.lucene.facet.DrillSideways;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollectorManager;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.range.LongRange;
import org.apache.lucene.facet.range.LongRangeFacetCounts;
@ -115,13 +116,13 @@ public class RangeFacetsExample implements Closeable {
/** User runs a query and counts facets. */
public FacetResult search() throws IOException {
// Aggregates the facet counts
FacetsCollector fc = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(
searcher, new MatchAllDocsQuery(), 10, new FacetsCollectorManager())
.facetsCollector();
Facets facets = new LongRangeFacetCounts("timestamp", fc, PAST_HOUR, PAST_SIX_HOURS, PAST_DAY);
return facets.getAllChildren("timestamp");
@ -131,12 +132,13 @@ public class RangeFacetsExample implements Closeable {
public FacetResult searchTopChildren() throws IOException {
// Aggregates the facet counts
FacetsCollector fc = new FacetsCollector();
FacetsCollectorManager fcm = new FacetsCollectorManager();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(searcher, new MatchAllDocsQuery(), 10, fcm).facetsCollector();
Facets facets = new LongRangeFacetCounts("error timestamp", fc, logTimestampRanges);
return facets.getTopChildren(10, "error timestamp");

View File

@ -99,12 +99,13 @@ public class SimpleFacetsExample {
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
FacetsCollector fc = new FacetsCollector();
FacetsCollectorManager fcm = new FacetsCollectorManager();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(searcher, new MatchAllDocsQuery(), 10, fcm).facetsCollector();
// Retrieve results
List<FacetResult> results = new ArrayList<>();
@ -156,8 +157,8 @@ public class SimpleFacetsExample {
// Now user drills down on Publish Date/2010:
q.add("Publish Date", "2010");
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(searcher, q, 10, fc);
FacetsCollectorManager fcm = new FacetsCollectorManager();
FacetsCollector fc = FacetsCollectorManager.search(searcher, q, 10, fcm).facetsCollector();
// Retrieve results
Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);

View File

@ -25,6 +25,7 @@ import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollectorManager;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.sortedset.DefaultSortedSetDocValuesReaderState;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetCounts;
@ -92,12 +93,13 @@ public class SimpleSortedSetFacetsExample {
new DefaultSortedSetDocValuesReaderState(indexReader, config);
// Aggregates the facet counts
FacetsCollector fc = new FacetsCollector();
FacetsCollectorManager fcm = new FacetsCollectorManager();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(searcher, new MatchAllDocsQuery(), 10, fcm).facetsCollector();
// Retrieve results
Facets facets = new SortedSetDocValuesFacetCounts(state, fc);
@ -120,8 +122,8 @@ public class SimpleSortedSetFacetsExample {
// Now user drills down on Publish Year/2010:
DrillDownQuery q = new DrillDownQuery(config);
q.add("Publish Year", "2010");
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(searcher, q, 10, fc);
FacetsCollectorManager fcm = new FacetsCollectorManager();
FacetsCollector fc = FacetsCollectorManager.search(searcher, q, 10, fcm).facetsCollector();
// Retrieve results
Facets facets = new SortedSetDocValuesFacetCounts(state, fc);

View File

@ -27,6 +27,7 @@ import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollectorManager;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.StringDocValuesReaderState;
import org.apache.lucene.facet.StringValueFacetCounts;
@ -96,12 +97,13 @@ public class StringValueFacetCountsExample {
new StringDocValuesReaderState(indexReader, "Publish Year");
// Aggregates the facet counts
FacetsCollector fc = new FacetsCollector();
FacetsCollectorManager fcm = new FacetsCollectorManager();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(searcher, new MatchAllDocsQuery(), 10, fcm).facetsCollector();
// Retrieve results
Facets authorFacets = new StringValueFacetCounts(authorState, fc);

View File

@ -198,9 +198,9 @@
* org.apache.lucene.search.Collector}, and as such can be passed to the search() method of Lucene's
* {@link org.apache.lucene.search.IndexSearcher}. In case the application also needs to collect
* documents (in addition to accumulating/collecting facets), you can use one of {@link
* org.apache.lucene.facet.FacetsCollector#search(org.apache.lucene.search.IndexSearcher,
* org.apache.lucene.search.Query, int, org.apache.lucene.search.Collector)
* FacetsCollector.search(...)} utility methods.
* org.apache.lucene.facet.FacetsCollectorManager#search(org.apache.lucene.search.IndexSearcher,
* org.apache.lucene.search.Query, int, org.apache.lucene.facet.FacetsCollectorManager)
* FacetsCollectorManager.search(...)} utility methods.
*
* <p>There is a facets collecting code example in {@link
* org.apache.lucene.demo.facet.SimpleFacetsExample#facetsWithSearch()}, see <a

View File

@ -22,23 +22,9 @@ import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopFieldCollectorManager;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TopScoreDocCollectorManager;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.DocIdSetBuilder;
@ -155,118 +141,4 @@ public class FacetsCollector extends SimpleCollector {
scores = null;
context = null;
}
/** Utility method, to search and also collect all hits into the provided {@link Collector}. */
public static TopDocs search(IndexSearcher searcher, Query q, int n, Collector fc)
throws IOException {
return doSearch(searcher, null, q, n, null, false, fc);
}
/** Utility method, to search and also collect all hits into the provided {@link Collector}. */
public static TopFieldDocs search(IndexSearcher searcher, Query q, int n, Sort sort, Collector fc)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return (TopFieldDocs) doSearch(searcher, null, q, n, sort, false, fc);
}
/** Utility method, to search and also collect all hits into the provided {@link Collector}. */
public static TopFieldDocs search(
IndexSearcher searcher, Query q, int n, Sort sort, boolean doDocScores, Collector fc)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return (TopFieldDocs) doSearch(searcher, null, q, n, sort, doDocScores, fc);
}
/** Utility method, to search and also collect all hits into the provided {@link Collector}. */
public static TopDocs searchAfter(
IndexSearcher searcher, ScoreDoc after, Query q, int n, Collector fc) throws IOException {
return doSearch(searcher, after, q, n, null, false, fc);
}
/** Utility method, to search and also collect all hits into the provided {@link Collector}. */
public static TopDocs searchAfter(
IndexSearcher searcher, ScoreDoc after, Query q, int n, Sort sort, Collector fc)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return doSearch(searcher, after, q, n, sort, false, fc);
}
/** Utility method, to search and also collect all hits into the provided {@link Collector}. */
public static TopDocs searchAfter(
IndexSearcher searcher,
ScoreDoc after,
Query q,
int n,
Sort sort,
boolean doDocScores,
Collector fc)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return doSearch(searcher, after, q, n, sort, doDocScores, fc);
}
private static TopDocs doSearch(
IndexSearcher searcher,
ScoreDoc after,
Query q,
int n,
Sort sort,
boolean doDocScores,
Collector fc)
throws IOException {
int limit = searcher.getIndexReader().maxDoc();
if (limit == 0) {
limit = 1;
}
n = Math.min(n, limit);
if (after != null && after.doc >= limit) {
throw new IllegalArgumentException(
"after.doc exceeds the number of documents in the reader: after.doc="
+ after.doc
+ " limit="
+ limit);
}
TopDocs topDocs = null;
if (n == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
searcher.search(q, MultiCollector.wrap(totalHitCountCollector, fc));
topDocs =
new TopDocs(
new TotalHits(totalHitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO),
new ScoreDoc[0]);
} else {
TopDocsCollector<?> hitsCollector;
if (sort != null) {
if (after != null && !(after instanceof FieldDoc)) {
// TODO: if we fix type safety of TopFieldDocs we can
// remove this
throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
}
hitsCollector =
new TopFieldCollectorManager(sort, n, (FieldDoc) after, Integer.MAX_VALUE, false)
.newCollector(); // TODO: can we disable exact hit counts
} else {
hitsCollector =
new TopScoreDocCollectorManager(n, after, Integer.MAX_VALUE, false).newCollector();
}
searcher.search(q, MultiCollector.wrap(hitsCollector, fc));
topDocs = hitsCollector.topDocs();
if (doDocScores) {
TopFieldCollector.populateScores(topDocs.scoreDocs, searcher, q);
}
}
return topDocs;
}
}

View File

@ -19,7 +19,20 @@ package org.apache.lucene.facet;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiCollectorManager;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopFieldCollectorManager;
import org.apache.lucene.search.TopScoreDocCollectorManager;
import org.apache.lucene.search.TotalHitCountCollectorManager;
import org.apache.lucene.search.TotalHits;
/**
* A {@link CollectorManager} implementation which produces FacetsCollector and produces a merged
@ -27,12 +40,24 @@ import org.apache.lucene.search.CollectorManager;
*/
public class FacetsCollectorManager implements CollectorManager<FacetsCollector, FacetsCollector> {
private final boolean keepScores;
/** Default constructor: the created {@link FacetsCollector}s do not keep the scores of matching hits. */
public FacetsCollectorManager() {}
public FacetsCollectorManager() {
this(false);
}
/**
* Creates a new collector manager that in turn creates {@link FacetsCollector} using the provided
* {@code keepScores} flag.
*
* @param keepScores whether the created {@link FacetsCollector}s keep the scores of matching hits
*/
public FacetsCollectorManager(boolean keepScores) {
this.keepScores = keepScores;
}
@Override
public FacetsCollector newCollector() throws IOException {
return new FacetsCollector();
return new FacetsCollector(keepScores);
}
@Override
@ -54,4 +79,138 @@ public class FacetsCollectorManager implements CollectorManager<FacetsCollector,
facetsCollector -> matchingDocs.addAll(facetsCollector.getMatchingDocs()));
}
}
/**
* Utility method, to search and also collect matching hits into the {@link FacetsCollector}
* created by the provided {@link FacetsCollectorManager}.
*
* @return the top docs together with the reduced {@link FacetsCollector}
*/
public static FacetsResult search(
IndexSearcher searcher, Query q, int n, FacetsCollectorManager fcm) throws IOException {
return doSearch(searcher, null, q, n, null, false, fcm);
}
/**
* Utility method, to search (sorting hits by the provided {@link Sort}) and also collect matching
* hits into the {@link FacetsCollector} created by the provided {@link FacetsCollectorManager}.
*
* @return the top docs together with the reduced {@link FacetsCollector}
* @throws IllegalArgumentException if {@code sort} is {@code null}
*/
public static FacetsResult search(
IndexSearcher searcher, Query q, int n, Sort sort, FacetsCollectorManager fcm)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return doSearch(searcher, null, q, n, sort, false, fcm);
}
/**
* Utility method, to search (sorting hits by the provided {@link Sort}, optionally populating doc
* scores) and also collect matching hits into the {@link FacetsCollector} created by the provided
* {@link FacetsCollectorManager}.
*
* @return the top docs together with the reduced {@link FacetsCollector}
* @throws IllegalArgumentException if {@code sort} is {@code null}
*/
public static FacetsResult search(
IndexSearcher searcher,
Query q,
int n,
Sort sort,
boolean doDocScores,
FacetsCollectorManager fcm)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return doSearch(searcher, null, q, n, sort, doDocScores, fcm);
}
/**
* Utility method, to search (with pagination relative to {@code after}) and also collect matching
* hits into the {@link FacetsCollector} created by the provided {@link FacetsCollectorManager}.
*
* @return the top docs together with the reduced {@link FacetsCollector}
*/
public static FacetsResult searchAfter(
IndexSearcher searcher, ScoreDoc after, Query q, int n, FacetsCollectorManager fcm)
throws IOException {
return doSearch(searcher, after, q, n, null, false, fcm);
}
/**
* Utility method, to search (with pagination relative to {@code after}, sorting hits by the
* provided {@link Sort}) and also collect matching hits into the {@link FacetsCollector} created
* by the provided {@link FacetsCollectorManager}.
*
* @return the top docs together with the reduced {@link FacetsCollector}
* @throws IllegalArgumentException if {@code sort} is {@code null}
*/
public static FacetsResult searchAfter(
IndexSearcher searcher, ScoreDoc after, Query q, int n, Sort sort, FacetsCollectorManager fcm)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return doSearch(searcher, after, q, n, sort, false, fcm);
}
/**
* Utility method, to search (with pagination relative to {@code after}, sorting hits by the
* provided {@link Sort}, optionally populating doc scores) and also collect matching hits into
* the {@link FacetsCollector} created by the provided {@link FacetsCollectorManager}.
*
* @return the top docs together with the reduced {@link FacetsCollector}
* @throws IllegalArgumentException if {@code sort} is {@code null}
*/
public static FacetsResult searchAfter(
IndexSearcher searcher,
ScoreDoc after,
Query q,
int n,
Sort sort,
boolean doDocScores,
FacetsCollectorManager fcm)
throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must not be null");
}
return doSearch(searcher, after, q, n, sort, doDocScores, fcm);
}
/**
* Shared implementation behind the public {@code search}/{@code searchAfter} utility methods:
* runs the query once, collecting top hits and facets in a single pass via a
* {@link MultiCollectorManager} that pairs a top-docs (or hit-count) manager with {@code fcm}.
*
* @return the collected {@link TopDocs} together with the reduced {@link FacetsCollector}
*/
private static FacetsResult doSearch(
IndexSearcher searcher,
ScoreDoc after,
Query q,
int n,
Sort sort,
boolean doDocScores,
FacetsCollectorManager fcm)
throws IOException {
// clamp n to the number of docs in the index; use 1 when the index is empty so that the
// top-docs collector managers below accept the value
int limit = searcher.getIndexReader().maxDoc();
if (limit == 0) {
limit = 1;
}
n = Math.min(n, limit);
if (after != null && after.doc >= limit) {
throw new IllegalArgumentException(
"after.doc exceeds the number of documents in the reader: after.doc="
+ after.doc
+ " limit="
+ limit);
}
final TopDocs topDocs;
final FacetsCollector facetsCollector;
if (n == 0) {
// no hits requested: only count matches (exact count) while still collecting facets
TotalHitCountCollectorManager hitCountCollectorManager = new TotalHitCountCollectorManager();
MultiCollectorManager multiCollectorManager =
new MultiCollectorManager(hitCountCollectorManager, fcm);
Object[] result = searcher.search(q, multiCollectorManager);
topDocs =
new TopDocs(
new TotalHits((Integer) result[0], TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]);
facetsCollector = (FacetsCollector) result[1];
} else {
final MultiCollectorManager multiCollectorManager;
if (sort != null) {
if (after != null && !(after instanceof FieldDoc)) {
// TODO: if we fix type safety of TopFieldDocs we can
// remove this
throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
}
// Integer.MAX_VALUE threshold requests exact total hit counts (presumably intentional to
// preserve the behavior of the former FacetsCollector#doSearch — TODO confirm)
TopFieldCollectorManager topFieldCollectorManager =
new TopFieldCollectorManager(sort, n, (FieldDoc) after, Integer.MAX_VALUE, true);
multiCollectorManager = new MultiCollectorManager(topFieldCollectorManager, fcm);
} else {
TopScoreDocCollectorManager topScoreDocCollectorManager =
new TopScoreDocCollectorManager(n, after, Integer.MAX_VALUE, true);
multiCollectorManager = new MultiCollectorManager(topScoreDocCollectorManager, fcm);
}
// result[i] is the reduced value of the i-th manager passed to MultiCollectorManager
Object[] result = searcher.search(q, multiCollectorManager);
topDocs = (TopDocs) result[0];
if (doDocScores) {
// sort-based collection does not record scores; backfill them with a second scoring pass
TopFieldCollector.populateScores(topDocs.scoreDocs, searcher, q);
}
facetsCollector = (FacetsCollector) result[1];
}
return new FacetsResult(topDocs, facetsCollector);
}
/**
* Holds the results of a search run via the static utility methods exposed by this class: the
* {@link TopDocs} for the query as well as the facets results, made available through the reduced
* {@link FacetsCollector}.
*
* @param topDocs the top docs collected for the query
* @param facetsCollector the reduced {@link FacetsCollector} holding the matching docs for
*     subsequent faceting
*/
public record FacetsResult(TopDocs topDocs, FacetsCollector facetsCollector) {}
}

View File

@ -46,8 +46,8 @@
* all methods implement a common {@link org.apache.lucene.facet.Facets} base API that you use to
* obtain specific facet counts.
*
* <p>The various {@link org.apache.lucene.facet.FacetsCollector#search} utility methods are useful
* for doing an "ordinary" search (sorting by score, or by a specified Sort) but also collecting
* into a {@link org.apache.lucene.facet.FacetsCollector} for subsequent faceting.
* <p>The various {@link org.apache.lucene.facet.FacetsCollectorManager#search} utility methods are
* useful for doing an "ordinary" search (sorting by score, or by a specified Sort) but also
* collecting into a {@link org.apache.lucene.facet.FacetsCollectorManager} for subsequent faceting.
*/
package org.apache.lucene.facet;

View File

@ -189,8 +189,10 @@ public class TestDrillDownQuery extends FacetTestCase {
DrillDownQuery q = new DrillDownQuery(config);
q.add("b", "1");
int limit = 0;
FacetsCollector facetCollector = new FacetsCollector();
FacetsCollector.search(searcher, q, limit, facetCollector);
FacetsCollector facetCollector =
FacetsCollectorManager.search(searcher, q, limit, new FacetsCollectorManager())
.facetsCollector();
Facets facets =
getTaxonomyFacetCounts(
taxo, config, facetCollector, config.getDimConfig("b").indexFieldName);

View File

@ -331,9 +331,9 @@ public class TestMultipleIndexFields extends FacetTestCase {
}
private FacetsCollector performSearch(IndexSearcher searcher) throws IOException {
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, fc);
return fc;
return FacetsCollectorManager.search(
searcher, new MatchAllDocsQuery(), 10, new FacetsCollectorManager())
.facetsCollector();
}
private void seedIndex(TaxonomyWriter tw, RandomIndexWriter iw, FacetsConfig config)

View File

@ -1362,9 +1362,13 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
if (VERBOSE) {
System.out.println("\nTEST: iter content=" + searchToken);
}
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(
searcher, new TermQuery(new Term("content", searchToken)), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(
searcher,
new TermQuery(new Term("content", searchToken)),
10,
new FacetsCollectorManager())
.facetsCollector();
Facets facets;
if (exec != null) {
facets = new ConcurrentSortedSetDocValuesFacetCounts(state, fc, exec);
@ -1503,9 +1507,13 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
if (VERBOSE) {
System.out.println("\nTEST: iter content=" + searchToken);
}
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(
searcher, new TermQuery(new Term("content", searchToken)), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(
searcher,
new TermQuery(new Term("content", searchToken)),
10,
new FacetsCollectorManager())
.facetsCollector();
Facets facets;
if (exec != null) {
facets = new ConcurrentSortedSetDocValuesFacetCounts(state, fc, exec);

View File

@ -24,6 +24,7 @@ import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollectorManager;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.LabelAndValue;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -89,8 +90,10 @@ public class TestOrdinalMappingLeafReader extends FacetTestCase {
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
IndexSearcher searcher = newSearcher(indexReader);
FacetsCollector collector = new FacetsCollector();
FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, collector);
FacetsCollector collector =
FacetsCollectorManager.search(
searcher, new MatchAllDocsQuery(), 10, new FacetsCollectorManager())
.facetsCollector();
// tag facets
Facets tagFacets = new FastTaxonomyFacetCounts("$tags", taxoReader, facetConfig, collector);

View File

@ -996,8 +996,13 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
if (VERBOSE) {
System.out.println("\nTEST: iter content=" + searchToken);
}
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(searcher, new TermQuery(new Term("content", searchToken)), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(
searcher,
new TermQuery(new Term("content", searchToken)),
10,
new FacetsCollectorManager())
.facetsCollector();
Facets facets = getTaxonomyFacetCounts(tr, config, fc);
// Slow, yet hopefully bug-free, faceting:

View File

@ -309,10 +309,12 @@ public class TestTaxonomyFacetValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector fc = new FacetsCollector(true);
BoostQuery csq = new BoostQuery(new ConstantScoreQuery(new MatchAllDocsQuery()), 2f);
TopDocs td = FacetsCollector.search(newSearcher(r), csq, 10, fc);
FacetsCollectorManager.FacetsResult facetsResult =
FacetsCollectorManager.search(newSearcher(r), csq, 10, new FacetsCollectorManager(true));
TopDocs td = facetsResult.topDocs();
FacetsCollector fc = facetsResult.facetsCollector();
// Test SUM:
Facets facets =
@ -403,11 +405,12 @@ public class TestTaxonomyFacetValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector fc = new FacetsCollector(true);
// score documents by their 'price' field - makes asserting the correct counts for the
// categories easier
Query q = new FunctionQuery(new LongFieldSource("price"));
FacetsCollector.search(newSearcher(r), q, 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(newSearcher(r), q, 10, new FacetsCollectorManager(true))
.facetsCollector();
// Test SUM:
Facets facets =
@ -540,8 +543,10 @@ public class TestTaxonomyFacetValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector fc = new FacetsCollector(true);
FacetsCollector.search(newSearcher(r), new MatchAllDocsQuery(), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(
newSearcher(r), new MatchAllDocsQuery(), 10, new FacetsCollectorManager(true))
.facetsCollector();
Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, fc);
Facets facets2 =
@ -595,8 +600,13 @@ public class TestTaxonomyFacetValueSource extends FacetTestCase {
if (VERBOSE) {
System.out.println("\nTEST: iter content=" + searchToken);
}
FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(searcher, new TermQuery(new Term("content", searchToken)), 10, fc);
FacetsCollector fc =
FacetsCollectorManager.search(
searcher,
new TermQuery(new Term("content", searchToken)),
10,
new FacetsCollectorManager())
.facetsCollector();
checkResults(
numDims,