LUCENE-5339: address some nocommits

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5339@1546129 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2013-11-27 17:32:42 +00:00
parent 458786d0f4
commit 614af8799c
18 changed files with 138 additions and 151 deletions

View File

@@ -431,7 +431,7 @@ public class IndexSearcher {
       limit = 1;
     }
     if (after != null && after.doc >= limit) {
-      throw new IllegalArgumentException("after.doc exceeds the number of documents in that reader: after.doc="
+      throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc="
         + after.doc + " limit=" + limit);
     }
     nDocs = Math.min(nDocs, limit);

View File

@@ -104,16 +104,15 @@ public class AssociationsFacetsExample {
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
     FacetsConfig config = getConfig(null);
-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();
     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);
-    Facets tags = new TaxonomyFacetSumIntAssociations("$tags", taxoReader, config, sfc);
-    Facets genre = new TaxonomyFacetSumFloatAssociations("$genre", taxoReader, config, sfc);
+    Facets tags = new TaxonomyFacetSumIntAssociations("$tags", taxoReader, config, fc);
+    Facets genre = new TaxonomyFacetSumFloatAssociations("$genre", taxoReader, config, fc);
     // Retrieve results
     List<FacetResult> results = new ArrayList<FacetResult>();

View File

@@ -100,16 +100,15 @@ public class ExpressionAggregationFacetsExample {
     bindings.add(new SortField("popularity", SortField.Type.LONG)); // the value of the 'popularity' field
     // Aggregates the facet values
-    FacetsCollector sfc = new FacetsCollector(true);
+    FacetsCollector fc = new FacetsCollector(true);
     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);
     // Retrieve results
-    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, expr.getValueSource(bindings));
+    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, expr.getValueSource(bindings));
     FacetResult result = facets.getTopChildren(10, "A");
     indexReader.close();

View File

@@ -109,22 +109,21 @@ public class MultiCategoryListsFacetsExample {
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
     FacetsConfig config = getConfig(null);
-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();
     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);
     // Retrieve results
     List<FacetResult> results = new ArrayList<FacetResult>();
     // Count both "Publish Date" and "Author" dimensions
-    Facets author = new FastTaxonomyFacetCounts("author", taxoReader, config, sfc);
+    Facets author = new FastTaxonomyFacetCounts("author", taxoReader, config, fc);
     results.add(author.getTopChildren(10, "Author"));
-    Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, sfc);
+    Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, fc);
     results.add(pubDate.getTopChildren(10, "Publish Date"));
     indexReader.close();

View File

@@ -89,15 +89,14 @@ public class RangeFacetsExample implements Closeable {
   public FacetResult search() throws IOException {
     // Aggregates the facet counts
-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();
     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
-    Facets facets = new RangeFacetCounts("timestamp", sfc,
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);
+    Facets facets = new RangeFacetCounts("timestamp", fc,
                                          PAST_HOUR,
                                          PAST_SIX_HOURS,
                                          PAST_DAY);

View File

@@ -105,19 +105,18 @@ public class SimpleFacetsExample {
     TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
     FacetsConfig config = getConfig(null);
-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();
     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);
     // Retrieve results
     List<FacetResult> results = new ArrayList<FacetResult>();
     // Count both "Publish Date" and "Author" dimensions
-    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, sfc);
+    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
     results.add(facets.getTopChildren(10, "Author"));
     results.add(facets.getTopChildren(10, "Publish Date"));
@@ -140,11 +139,11 @@ public class SimpleFacetsExample {
     // Now user drills down on Publish Date/2010:
     q.add("Publish Date", "2010");
-    FacetsCollector sfc = new FacetsCollector();
-    searcher.search(q, sfc);
+    FacetsCollector fc = new FacetsCollector();
+    Facets.search(searcher, q, 10, fc);
     // Retrieve results
-    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, sfc);
+    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
     FacetResult result = facets.getTopChildren(10, "Author");
     indexReader.close();

View File

@@ -96,16 +96,15 @@ public class SimpleSortedSetFacetsExample {
     FacetsConfig config = getConfig();
     // Aggregates the facet counts
-    FacetsCollector sfc = new FacetsCollector();
+    FacetsCollector fc = new FacetsCollector();
     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
-    searcher.search(new MatchAllDocsQuery(), sfc);
+    // you'd use a "normal" query:
+    Facets.search(searcher, new MatchAllDocsQuery(), 10, fc);
     // Retrieve results
-    Facets facets = new SortedSetDocValuesFacetCounts(state, sfc);
+    Facets facets = new SortedSetDocValuesFacetCounts(state, fc);
     List<FacetResult> results = new ArrayList<FacetResult>();
     results.add(facets.getTopChildren(10, "Author"));
@@ -125,11 +124,11 @@ public class SimpleSortedSetFacetsExample {
     // Now user drills down on Publish Year/2010:
     DrillDownQuery q = new DrillDownQuery(config);
     q.add("Publish Year", "2010");
-    FacetsCollector sfc = new FacetsCollector();
-    searcher.search(q, sfc);
+    FacetsCollector fc = new FacetsCollector();
+    Facets.search(searcher, q, 10, fc);
     // Retrieve results
-    Facets facets = new SortedSetDocValuesFacetCounts(state, sfc);
+    Facets facets = new SortedSetDocValuesFacetCounts(state, fc);
     FacetResult result = facets.getTopChildren(10, "Author");
     indexReader.close();

View File

@@ -20,13 +20,16 @@ package org.apache.lucene.facet;
 import java.io.IOException;
 import java.util.List;
+import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.FilteredQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopDocsCollector;
 import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopFieldDocs;
 import org.apache.lucene.search.TopScoreDocCollector;
@@ -52,74 +55,108 @@ public abstract class Facets {
    *  depending on the type of document. */
   public abstract List<FacetResult> getAllDims(int topN) throws IOException;
-  // nocommit where to move?
-  /** Utility method, to search for top hits by score
-   *  ({@link IndexSearcher#search(Query,int)}), but
-   *  also collect results into a {@link
-   *  FacetsCollector} for faceting. */
-  public static TopDocs search(IndexSearcher searcher, Query q, int topN, FacetsCollector sfc) throws IOException {
-    // TODO: can we pass the "right" boolean for
-    // in-order...? we'd need access to the protected
-    // IS.search methods taking Weight... could use
-    // reflection...
-    TopScoreDocCollector hitsCollector = TopScoreDocCollector.create(topN, false);
-    searcher.search(q, MultiCollector.wrap(hitsCollector, sfc));
-    return hitsCollector.topDocs();
+  // nocommit where to put these utility methods?
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopDocs search(IndexSearcher searcher, Query q, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, null, q, null, n, null, false, false, fc);
   }
-  // nocommit where to move?
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopDocs search(IndexSearcher searcher, Query q, Filter filter, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, null, q, filter, n, null, false, false, fc);
+  }
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int n, Sort sort, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, null, q, filter, n, sort, false, false, fc);
+  }
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int n, Sort sort, boolean doDocScores, boolean doMaxScore, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, null, q, filter, n, sort, doDocScores, doMaxScore, fc);
+  }
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, after, q, null, n, null, false, false, fc);
+  }
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, FacetsCollector fc) throws IOException {
+    return doSearch(searcher, after, q, filter, n, null, false, false, fc);
+  }
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, after, q, filter, n, sort, false, false, fc);
+  }
+  /** Utility method, to search and also collect all hits
+   *  into the provided {@link FacetsCollector}. */
+  public static TopDocs searchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, boolean doDocScores, boolean doMaxScore, FacetsCollector fc) throws IOException {
+    if (sort == null) {
+      throw new IllegalArgumentException("sort must not be null");
+    }
+    return (TopFieldDocs) doSearch(searcher, after, q, filter, n, sort, doDocScores, doMaxScore, fc);
+  }
+  private static TopDocs doSearch(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort,
+                                  boolean doDocScores, boolean doMaxScore, FacetsCollector fc) throws IOException {
-  /** Utility method, to search for top hits by score with a filter
-   *  ({@link IndexSearcher#search(Query,Filter,int)}), but
-   *  also collect results into a {@link
-   *  FacetsCollector} for faceting. */
-  public static TopDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, FacetsCollector sfc) throws IOException {
     if (filter != null) {
       q = new FilteredQuery(q, filter);
     }
-    return search(searcher, q, topN, sfc);
-  }
-  // nocommit where to move?
-  /** Utility method, to search for top hits by a custom
-   *  {@link Sort} with a filter
-   *  ({@link IndexSearcher#search(Query,Filter,int,Sort)}), but
-   *  also collect results into a {@link
-   *  FacetsCollector} for faceting. */
-  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, FacetsCollector sfc) throws IOException {
-    return search(searcher, q, filter, topN, sort, false, false, sfc);
-  }
-  // nocommit where to move?
-  /** Utility method, to search for top hits by a custom
-   *  {@link Sort} with a filter
-   *  ({@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)}), but
-   *  also collect results into a {@link
-   *  FacetsCollector} for faceting. */
-  public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, boolean doDocScores, boolean doMaxScore, FacetsCollector sfc) throws IOException {
     int limit = searcher.getIndexReader().maxDoc();
     if (limit == 0) {
       limit = 1;
     }
-    topN = Math.min(topN, limit);
+    n = Math.min(n, limit);
+    if (after != null && after.doc >= limit) {
+      throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc="
+        + after.doc + " limit=" + limit);
+    }
+    TopDocsCollector<?> hitsCollector;
+    if (sort != null) {
+      if (after != null && !(after instanceof FieldDoc)) {
+        // TODO: if we fix type safety of TopFieldDocs we can
+        // remove this
+        throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
+      }
       boolean fillFields = true;
-    TopFieldCollector hitsCollector = TopFieldCollector.create(sort, topN,
-                                                               null,
-                                                               fillFields,
-                                                               doDocScores,
-                                                               doMaxScore,
-                                                               false);
+      hitsCollector = TopFieldCollector.create(sort, n,
+                                               (FieldDoc) after,
+                                               fillFields,
+                                               doDocScores,
+                                               doMaxScore,
+                                               false);
-    if (filter != null) {
-      q = new FilteredQuery(q, filter);
+    } else {
+      // TODO: can we pass the right boolean for
+      // in-order instead of hardwired to false...? we'd
+      // need access to the protected IS.search methods
+      // taking Weight... could use reflection...
+      hitsCollector = TopScoreDocCollector.create(n, after, false);
     }
-    searcher.search(q, MultiCollector.wrap(hitsCollector, sfc));
-    return (TopFieldDocs) hitsCollector.topDocs();
+    searcher.search(q, MultiCollector.wrap(hitsCollector, fc));
+    return hitsCollector.topDocs();
   }
-  // nocommit need searchAfter variants too
 }
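
The net effect of this hunk is that callers get one-line search-plus-facets helpers instead of wiring up MultiCollector by hand. Below is a minimal usage sketch, not part of the commit: the "contents" field, the "Author" dimension, and the import locations are assumptions (classes were still being moved around on this branch).

import java.io.IOException;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

class FacetsSearchSketch {
  // One pass over the index: top-10 hits plus the matching docs for faceting.
  static TopDocs searchWithFacets(IndexSearcher searcher, TaxonomyReader taxoReader,
                                  FacetsConfig config) throws IOException {
    FacetsCollector fc = new FacetsCollector();
    // Facets.search wraps its own TopScoreDocCollector together with fc in a
    // MultiCollector, so hits and facet ordinals are gathered in one search:
    TopDocs hits = Facets.search(searcher, new TermQuery(new Term("contents", "lucene")), 10, fc);
    Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
    FacetResult topAuthors = facets.getTopChildren(10, "Author"); // facets over the same matches
    return hits;
  }
}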

View File

@@ -35,7 +35,6 @@ public abstract class FloatTaxonomyFacets extends TaxonomyFacets {
     values = new float[taxoReader.getSize()];
   }
-  // nocommit we could do this lazily instead:
   protected void rollup() throws IOException {
     // Rollup any necessary dims:
     for(Map.Entry<String,FacetsConfig.DimConfig> ent : config.getDimConfigs().entrySet()) {

View File

@@ -35,7 +35,6 @@ public abstract class IntTaxonomyFacets extends TaxonomyFacets {
     values = new int[taxoReader.getSize()];
   }
-  // nocommit we could do this lazily instead:
   protected void rollup() throws IOException {
     // Rollup any necessary dims:
     for(Map.Entry<String,FacetsConfig.DimConfig> ent : config.getDimConfigs().entrySet()) {

View File

@@ -18,12 +18,9 @@ package org.apache.lucene.facet;
  */
 import java.io.IOException;
-import java.util.Arrays;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.index.DirectoryReader;

View File

@@ -18,9 +18,6 @@ package org.apache.lucene.facet;
  */
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Random;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -28,8 +25,6 @@ import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@@ -47,7 +42,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Test;
 public class TestDrillDownQuery extends FacetTestCase {

View File

@@ -18,12 +18,8 @@ package org.apache.lucene.facet;
  */
 import java.io.IOException;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleDocValuesField;
@@ -33,9 +29,7 @@ import org.apache.lucene.document.FloatDocValuesField;
 import org.apache.lucene.document.FloatField;
 import org.apache.lucene.document.LongField;
 import org.apache.lucene.document.NumericDocValuesField;
-import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.DrillSideways.DrillSidewaysResult;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@@ -50,8 +44,7 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util._TestUtil;
-// nocommit rename to TestRangeFacetCounts
-public class TestRangeFacets extends FacetTestCase {
+public class TestRangeFacetCounts extends FacetTestCase {
   public void testBasicLong() throws Exception {
     Directory d = newDirectory();

View File

@@ -26,9 +26,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.facet.FacetTestCase;
 import org.apache.lucene.facet.SearcherTaxonomyManager.SearcherAndTaxonomy;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.MatchAllDocsQuery;

View File

@@ -19,7 +19,6 @@ package org.apache.lucene.facet;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;

View File

@@ -30,14 +30,11 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.NoMergePolicy;

View File

@@ -17,15 +17,9 @@ package org.apache.lucene.facet;
  *  limitations under the License.
  */
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
@@ -33,11 +27,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.IntField;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.facet.FacetTestCase;
-import org.apache.lucene.facet.taxonomy.FacetLabel;
-import org.apache.lucene.facet.taxonomy.PrintTaxonomyStats;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.index.AtomicReaderContext;
@@ -51,22 +41,15 @@ import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
 import org.apache.lucene.queries.function.valuesource.IntFieldSource;
 import org.apache.lucene.queries.function.valuesource.LongFieldSource;
-import org.apache.lucene.queries.function.valuesource.QueryValueSource;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopScoreDocCollector;
-import org.apache.lucene.search.similarities.DefaultSimilarity;
-import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
-import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util._TestUtil;
 public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
@@ -122,8 +105,8 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
     // MatchAllDocsQuery is for "browsing" (counts facets
     // for all non-deleted docs in the index); normally
-    // you'd use a "normal" query, and use MultiCollector to
-    // wrap collecting the "normal" hits and also facets:
+    // you'd use a "normal" query and one of the
+    // Facets.search utility methods:
     searcher.search(new MatchAllDocsQuery(), c);
     TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, new IntFieldSource("num"));
@@ -274,15 +257,13 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
     DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
     FacetsCollector fc = new FacetsCollector(true);
-    TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
     ConstantScoreQuery csq = new ConstantScoreQuery(new MatchAllDocsQuery());
     csq.setBoost(2.0f);
-    newSearcher(r).search(csq, MultiCollector.wrap(fc, topDocs));
+    TopDocs td = Facets.search(newSearcher(r), csq, 10, fc);
     Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, new TaxonomyFacetSumValueSource.ScoreValueSource());
-    TopDocs td = topDocs.topDocs();
     int expected = (int) (td.getMaxScore() * td.totalHits);
     assertEquals(expected, facets.getSpecificValue("dim", "a").intValue());
@@ -354,12 +335,12 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
       @Override public String description() { return "score()"; }
     };
-    FacetsCollector sfc = new FacetsCollector(true);
+    FacetsCollector fc = new FacetsCollector(true);
     TopScoreDocCollector tsdc = TopScoreDocCollector.create(10, true);
     // score documents by their 'price' field - makes asserting the correct counts for the categories easier
     Query q = new FunctionQuery(new LongFieldSource("price"));
-    newSearcher(r).search(q, MultiCollector.wrap(tsdc, sfc));
-    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, valueSource);
+    Facets.search(newSearcher(r), q, 10, fc);
+    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, valueSource);
     assertEquals("value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString());
@@ -416,15 +397,14 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
     DirectoryReader r = DirectoryReader.open(iw, true);
     DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
-    FacetsCollector sfc = new FacetsCollector(true);
-    TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
-    newSearcher(r).search(new MatchAllDocsQuery(), MultiCollector.wrap(sfc, topDocs));
+    FacetsCollector fc = new FacetsCollector(true);
+    TopDocs hits = Facets.search(newSearcher(r), new MatchAllDocsQuery(), 10, fc);
-    Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, sfc);
-    Facets facets2 = new TaxonomyFacetSumValueSource(new DocValuesOrdinalsReader("$b"), taxoReader, config, sfc, new TaxonomyFacetSumValueSource.ScoreValueSource());
+    Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, fc);
+    Facets facets2 = new TaxonomyFacetSumValueSource(new DocValuesOrdinalsReader("$b"), taxoReader, config, fc, new TaxonomyFacetSumValueSource.ScoreValueSource());
     assertEquals(r.maxDoc(), facets1.getTopChildren(10, "a").value.intValue());
-    double expected = topDocs.topDocs().getMaxScore() * r.numDocs();
+    double expected = hits.getMaxScore() * r.numDocs();
     assertEquals(r.maxDoc(), facets2.getTopChildren(10, "b").value.doubleValue(), 1E-10);
     IOUtils.close(taxoWriter, iw, taxoReader, taxoDir, r, indexDir);
   }

View File

@@ -23,7 +23,7 @@ import org.junit.Test;
  *  limitations under the License.
  */
-public class TestCategoryPath extends FacetTestCase {
+public class TestFacetLabel extends FacetTestCase {
   @Test
   public void testBasic() {