mirror of https://github.com/apache/lucene.git
LUCENE-5339: address some nocommits
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5339@1546129 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 458786d0f4
commit 614af8799c
@@ -431,7 +431,7 @@ public class IndexSearcher {
       limit = 1;
     }
     if (after != null && after.doc >= limit) {
-      throw new IllegalArgumentException("after.doc exceeds the number of documents in that reader: after.doc="
+      throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc="
                                          + after.doc + " limit=" + limit);
     }
     nDocs = Math.min(nDocs, limit);
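For context, a brief sketch of when this guard fires; the query and page size below are illustrative, not part of the patch. The guard sits in IndexSearcher's paging (searchAfter) path: the ScoreDoc passed back in must come from the same reader, otherwise after.doc can point past maxDoc() and the exception above is thrown.

    // Hypothetical paging example (assumes a non-empty index); a ScoreDoc
    // taken from a different, larger reader could trip the
    // after.doc >= limit check above.
    TopDocs first = searcher.search(new MatchAllDocsQuery(), 10);
    ScoreDoc last = first.scoreDocs[first.scoreDocs.length - 1];
    TopDocs next = searcher.searchAfter(last, new MatchAllDocsQuery(), 10);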
@@ -104,16 +104,15 @@ public class AssociationsFacetsExample {
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
     FacetsConfig config = getConfig(null);

-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();

     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);

-    Facets tags = new TaxonomyFacetSumIntAssociations("$tags", taxoReader, config, sfc);
-    Facets genre = new TaxonomyFacetSumFloatAssociations("$genre", taxoReader, config, sfc);
+    Facets tags = new TaxonomyFacetSumIntAssociations("$tags", taxoReader, config, fc);
+    Facets genre = new TaxonomyFacetSumFloatAssociations("$genre", taxoReader, config, fc);

     // Retrieve results
     List<FacetResult> results = new ArrayList<FacetResult>();
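The remaining example diffs in this commit repeat the migration shown above: the FacetsCollector is no longer wrapped together with a hits collector in a MultiCollector; the new Facets.search utility runs the query and fills the collector in one call. A minimal before/after sketch, assuming an IndexSearcher named searcher; the TermQuery and topN value are illustrative (the old pattern is taken from the removed lines, the new one from the added lines):

    // Before: explicitly wrap the hits collector and the FacetsCollector.
    FacetsCollector sfc = new FacetsCollector();
    TopScoreDocCollector hits = TopScoreDocCollector.create(10, false);
    searcher.search(new TermQuery(new Term("contents", "lucene")),
                    MultiCollector.wrap(hits, sfc));
    TopDocs td = hits.topDocs();

    // After: one utility call returns the top hits and fills the collector.
    FacetsCollector fc = new FacetsCollector();
    TopDocs td2 = Facets.search(searcher,
                                new TermQuery(new Term("contents", "lucene")), 10, fc);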
@@ -100,16 +100,15 @@ public class ExpressionAggregationFacetsExample {
     bindings.add(new SortField("popularity", SortField.Type.LONG)); // the value of the 'popularity' field

     // Aggregates the facet values
-    FacetsCollector sfc = new FacetsCollector(true);
+    FacetsCollector fc = new FacetsCollector(true);

     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);

     // Retrieve results
-    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, expr.getValueSource(bindings));
+    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, expr.getValueSource(bindings));
     FacetResult result = facets.getTopChildren(10, "A");

     indexReader.close();
@@ -109,22 +109,21 @@ public class MultiCategoryListsFacetsExample {
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
     FacetsConfig config = getConfig(null);

-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();

     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);

     // Retrieve results
     List<FacetResult> results = new ArrayList<FacetResult>();

     // Count both "Publish Date" and "Author" dimensions
-    Facets author = new FastTaxonomyFacetCounts("author", taxoReader, config, sfc);
+    Facets author = new FastTaxonomyFacetCounts("author", taxoReader, config, fc);
     results.add(author.getTopChildren(10, "Author"));

-    Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, sfc);
+    Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, fc);
     results.add(pubDate.getTopChildren(10, "Publish Date"));

     indexReader.close();
@@ -89,15 +89,14 @@ public class RangeFacetsExample implements Closeable {
   public FacetResult search() throws IOException {

     // Aggregates the facet counts
-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();

     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);

-    Facets facets = new RangeFacetCounts("timestamp", sfc,
+    Facets facets = new RangeFacetCounts("timestamp", fc,
                                          PAST_HOUR,
                                          PAST_SIX_HOURS,
                                          PAST_DAY);
@@ -105,19 +105,18 @@ public class SimpleFacetsExample {
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
     FacetsConfig config = getConfig(null);

-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();

     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);

     // Retrieve results
     List<FacetResult> results = new ArrayList<FacetResult>();

     // Count both "Publish Date" and "Author" dimensions
-    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, sfc);
+    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
     results.add(facets.getTopChildren(10, "Author"));
     results.add(facets.getTopChildren(10, "Publish Date"));
@@ -140,11 +139,11 @@ public class SimpleFacetsExample {

     // Now user drills down on Publish Date/2010:
     q.add("Publish Date", "2010");
-    FacetsCollector sfc = new FacetsCollector();
-    searcher.search(q, sfc);
+    FacetsCollector fc = new FacetsCollector();
+    Facets.search(searcher, q, 10, fc);

     // Retrieve results
-    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, sfc);
+    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
     FacetResult result = facets.getTopChildren(10, "Author");

     indexReader.close();
@@ -96,16 +96,15 @@ public class SimpleSortedSetFacetsExample {
     FacetsConfig config = getConfig();

     // Aggregatses the facet counts
-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();

     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);

     // Retrieve results
-    Facets facets = new SortedSetDocValuesFacetCounts(state, sfc);
+    Facets facets = new SortedSetDocValuesFacetCounts(state, fc);

     List<FacetResult> results = new ArrayList<FacetResult>();
     results.add(facets.getTopChildren(10, "Author"));
@@ -125,11 +124,11 @@ public class SimpleSortedSetFacetsExample {
     // Now user drills down on Publish Year/2010:
     DrillDownQuery q = new DrillDownQuery(config);
     q.add("Publish Year", "2010");
-    FacetsCollector sfc = new FacetsCollector();
-    searcher.search(q, sfc);
+    FacetsCollector fc = new FacetsCollector();
+    Facets.search(searcher, q, 10, fc);

     // Retrieve results
-    Facets facets = new SortedSetDocValuesFacetCounts(state, sfc);
+    Facets facets = new SortedSetDocValuesFacetCounts(state, fc);
     FacetResult result = facets.getTopChildren(10, "Author");
     indexReader.close();
@@ -20,13 +20,16 @@ package org.apache.lucene.facet;
 import java.io.IOException;
 import java.util.List;

+import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.FilteredQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopDocsCollector;
 import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopFieldDocs;
 import org.apache.lucene.search.TopScoreDocCollector;
@@ -52,74 +55,108 @@ public abstract class Facets {
    * depending on the type of document. */
   public abstract List<FacetResult> getAllDims(int topN) throws IOException;

-  // nocommit where to move?
+  // nocommit where to put these utility methods?

-  /** Utility method, to search for top hits by score
-   * ({@link IndexSearcher#search(Query,int)}), but
-   * also collect results into a {@link
-   * FacetsCollector} for faceting. */
-  public static TopDocs search(IndexSearcher searcher, Query q, int topN, FacetsCollector sfc) throws IOException {
-    // TODO: can we pass the "right" boolean for
-    // in-order...? we'd need access to the protected
-    // IS.search methods taking Weight... could use
-    // reflection...
-    TopScoreDocCollector hitsCollector = TopScoreDocCollector.create(topN, false);
-    searcher.search(q, MultiCollector.wrap(hitsCollector, sfc));
-    return hitsCollector.topDocs();
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public static TopDocs search(IndexSearcher searcher, Query q, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, null, q, null, n, null, false, false, fc);
   }

-  // nocommit where to move?
-
-  /** Utility method, to search for top hits by score with a filter
-   * ({@link IndexSearcher#search(Query,Filter,int)}), but
-   * also collect results into a {@link
-   * FacetsCollector} for faceting. */
-  public static TopDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, FacetsCollector sfc) throws IOException {
-    if (filter != null) {
-      q = new FilteredQuery(q, filter);
-    }
-    return search(searcher, q, topN, sfc);
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public static TopDocs search(IndexSearcher searcher, Query q, Filter filter, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, null, q, filter, n, null, false, false, fc);
   }

-  // nocommit where to move?
-
-  /** Utility method, to search for top hits by a custom
-   * {@link Sort} with a filter
-   * ({@link IndexSearcher#search(Query,Filter,int,Sort)}), but
-   * also collect results into a {@link
-   * FacetsCollector} for faceting. */
-  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, FacetsCollector sfc) throws IOException {
-    return search(searcher, q, filter, topN, sort, false, false, sfc);
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int n, Sort sort, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, null, q, filter, n, sort, false, false, fc);
   }

-  // nocommit where to move?
-
-  /** Utility method, to search for top hits by a custom
-   * {@link Sort} with a filter
-   * ({@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)}), but
-   * also collect results into a {@link
-   * FacetsCollector} for faceting. */
-  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, boolean doDocScores, boolean doMaxScore, FacetsCollector sfc) throws IOException {
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int n, Sort sort, boolean doDocScores, boolean doMaxScore, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, null, q, filter, n, sort, doDocScores, doMaxScore, fc);
+  }
+
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, after, q, null, n, null, false, false, fc);
+  }
+
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public static TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, after, q, filter, n, null, false, false, fc);
+  }
+
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public static TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, after, q, filter, n, sort, false, false, fc);
+  }
+
+  /** Utility method, to search and also collect all hits
+   * into the provided {@link FacetsCollector}. */
+  public static TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, boolean doDocScores, boolean doMaxScore, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, after, q, filter, n, sort, doDocScores, doMaxScore, fc);
+  }
+
+  private static TopDocs doSearch(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort,
+                                  boolean doDocScores, boolean doMaxScore, FacetsCollector fc) throws IOException {
+
     int limit = searcher.getIndexReader().maxDoc();
     if (limit == 0) {
       limit = 1;
     }
-    topN = Math.min(topN, limit);
+    n = Math.min(n, limit);

-    boolean fillFields = true;
-    TopFieldCollector hitsCollector = TopFieldCollector.create(sort, topN,
-                                                               null,
-                                                               fillFields,
-                                                               doDocScores,
-                                                               doMaxScore,
-                                                               false);
+    if (after != null && after.doc >= limit) {
+      throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc="
+                                         + after.doc + " limit=" + limit);
+    }
+
     if (filter != null) {
       q = new FilteredQuery(q, filter);
     }
-    searcher.search(q, MultiCollector.wrap(hitsCollector, sfc));
-    return (TopFieldDocs) hitsCollector.topDocs();
+
+    TopDocsCollector<?> hitsCollector;
+    if (sort != null) {
+      if (after != null && !(after instanceof FieldDoc)) {
+        // TODO: if we fix type safety of TopFieldDocs we can
+        // remove this
+        throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
+      }
+      boolean fillFields = true;
+      hitsCollector = TopFieldCollector.create(sort, n,
+                                               (FieldDoc) after,
+                                               fillFields,
+                                               doDocScores,
+                                               doMaxScore,
+                                               false);
+    } else {
+      // TODO: can we pass the right boolean for
+      // in-order instead of hardwired to false...? we'd
+      // need access to the protected IS.search methods
+      // taking Weight... could use reflection...
+      hitsCollector = TopScoreDocCollector.create(n, after, false);
+    }
+    searcher.search(q, MultiCollector.wrap(hitsCollector, fc));
+    return hitsCollector.topDocs();
   }

-  // nocommit need searchAfter variants too
-
 }
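Taken together, the added overloads all funnel into doSearch, so the plain, filtered, sorted, and searchAfter variants behave consistently. A short usage sketch built only from the signatures added above; the searcher, taxoReader, and config variables plus the sort field and topN are illustrative assumptions:

    FacetsCollector fc = new FacetsCollector();
    Sort sort = new Sort(new SortField("popularity", SortField.Type.LONG));
    // A null Filter is allowed (doSearch simply skips the FilteredQuery
    // wrap), but a null Sort for this overload throws IllegalArgumentException.
    TopFieldDocs hits = Facets.search(searcher, new MatchAllDocsQuery(), null, 10, sort, fc);
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);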
@@ -35,7 +35,6 @@ public abstract class FloatTaxonomyFacets extends TaxonomyFacets {
     values = new float[taxoReader.getSize()];
   }

-  // nocommit we could do this lazily instead:
   protected void rollup() throws IOException {
     // Rollup any necessary dims:
     for(Map.Entry<String,FacetsConfig.DimConfig> ent : config.getDimConfigs().entrySet()) {
@@ -35,7 +35,6 @@ public abstract class IntTaxonomyFacets extends TaxonomyFacets {
     values = new int[taxoReader.getSize()];
   }

-  // nocommit we could do this lazily instead:
   protected void rollup() throws IOException {
     // Rollup any necessary dims:
     for(Map.Entry<String,FacetsConfig.DimConfig> ent : config.getDimConfigs().entrySet()) {
@@ -18,12 +18,9 @@ package org.apache.lucene.facet;
  */

 import java.io.IOException;
 import java.util.Arrays;

 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.DirectoryReader;
@@ -18,9 +18,6 @@ package org.apache.lucene.facet;
  */

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;

 import org.apache.lucene.analysis.MockAnalyzer;
@@ -28,8 +25,6 @@ import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@@ -47,7 +42,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;

 public class TestDrillDownQuery extends FacetTestCase {

@@ -18,12 +18,8 @@ package org.apache.lucene.facet;
  */

 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleDocValuesField;
@@ -33,9 +29,7 @@ import org.apache.lucene.document.FloatDocValuesField;
 import org.apache.lucene.document.FloatField;
 import org.apache.lucene.document.LongField;
 import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.DrillSideways.DrillSidewaysResult;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@@ -50,8 +44,7 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util._TestUtil;


-// nocommit rename to TestRangeFacetCounts
-public class TestRangeFacets extends FacetTestCase {
+public class TestRangeFacetCounts extends FacetTestCase {

   public void testBasicLong() throws Exception {
     Directory d = newDirectory();
@@ -26,9 +26,7 @@ import java.util.concurrent.atomic.AtomicBoolean;

 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.SearcherTaxonomyManager.SearcherAndTaxonomy;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -19,7 +19,6 @@ package org.apache.lucene.facet;


 import org.apache.lucene.document.Document;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@@ -30,14 +30,11 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.NoMergePolicy;
@@ -17,15 +17,9 @@ package org.apache.lucene.facet;
  * limitations under the License.
  */

-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.PrintStream;
 import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;

 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -33,11 +27,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
-import org.apache.lucene.facet.taxonomy.PrintTaxonomyStats;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.AtomicReaderContext;
@@ -51,22 +41,15 @@ import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
 import org.apache.lucene.queries.function.valuesource.IntFieldSource;
 import org.apache.lucene.queries.function.valuesource.LongFieldSource;
 import org.apache.lucene.queries.function.valuesource.QueryValueSource;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util._TestUtil;

 public class TestTaxonomyFacetSumValueSource extends FacetTestCase {

@@ -122,8 +105,8 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {

     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
+    // you'd use a "normal" query and one of the
+    // Facets.search utility methods:
     searcher.search(new MatchAllDocsQuery(), c);

     TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, new IntFieldSource("num"));
@@ -274,15 +257,13 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
     DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

     FacetsCollector fc = new FacetsCollector(true);
-    TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
     ConstantScoreQuery csq = new ConstantScoreQuery(new MatchAllDocsQuery());
     csq.setBoost(2.0f);

-    newSearcher(r).search(csq, MultiCollector.wrap(fc, topDocs));
+    TopDocs td = Facets.search(newSearcher(r), csq, 10, fc);

     Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, new TaxonomyFacetSumValueSource.ScoreValueSource());

-    TopDocs td = topDocs.topDocs();
     int expected = (int) (td.getMaxScore() * td.totalHits);
     assertEquals(expected, facets.getSpecificValue("dim", "a").intValue());
@@ -354,12 +335,12 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
       @Override public String description() { return "score()"; }
     };

-    FacetsCollector sfc = new FacetsCollector(true);
-    TopScoreDocCollector tsdc = TopScoreDocCollector.create(10, true);
+    FacetsCollector fc = new FacetsCollector(true);
     // score documents by their 'price' field - makes asserting the correct counts for the categories easier
     Query q = new FunctionQuery(new LongFieldSource("price"));
-    newSearcher(r).search(q, MultiCollector.wrap(tsdc, sfc));
-    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, valueSource);
+    Facets.search(newSearcher(r), q, 10, fc);
+    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, valueSource);

     assertEquals("value=10.0 childCount=2\n  1 (6.0)\n  0 (4.0)\n", facets.getTopChildren(10, "a").toString());
@@ -416,15 +397,14 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
     DirectoryReader r = DirectoryReader.open(iw, true);
     DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);

-    FacetsCollector sfc = new FacetsCollector(true);
-    TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
-    newSearcher(r).search(new MatchAllDocsQuery(), MultiCollector.wrap(sfc, topDocs));
+    FacetsCollector fc = new FacetsCollector(true);
+    TopDocs hits = Facets.search(newSearcher(r), new MatchAllDocsQuery(), 10, fc);

-    Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, sfc);
-    Facets facets2 = new TaxonomyFacetSumValueSource(new DocValuesOrdinalsReader("$b"), taxoReader, config, sfc, new TaxonomyFacetSumValueSource.ScoreValueSource());
+    Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, fc);
+    Facets facets2 = new TaxonomyFacetSumValueSource(new DocValuesOrdinalsReader("$b"), taxoReader, config, fc, new TaxonomyFacetSumValueSource.ScoreValueSource());

     assertEquals(r.maxDoc(), facets1.getTopChildren(10, "a").value.intValue());
-    double expected = topDocs.topDocs().getMaxScore() * r.numDocs();
+    double expected = hits.getMaxScore() * r.numDocs();
     assertEquals(r.maxDoc(), facets2.getTopChildren(10, "b").value.doubleValue(), 1E-10);
     IOUtils.close(taxoWriter, iw, taxoReader, taxoDir, r, indexDir);
   }
@@ -23,7 +23,7 @@ import org.junit.Test;
  * limitations under the License.
  */

-public class TestCategoryPath extends FacetTestCase {
+public class TestFacetLabel extends FacetTestCase {

   @Test
   public void testBasic() {