LUCENE-5339: move/rename away from simple

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5339@1545798 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2013-11-26 19:53:22 +00:00
parent ea40a32e4a
commit 80743b6e78
71 changed files with 492 additions and 453 deletions

TODO
View File

@ -2,6 +2,7 @@ nocommit this!
TODO
- allow path.length==0?
- make a variant/sugar of FacetsConfig.build that just updates an existing doc?
- need test coverage of utility search methods
- move DocumentBuilder.build -> FacetsConfig.build
- getSpecificValue for a dim isn't reliable
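
A rough, non-authoritative sketch of the post-rename indexing flow these TODO items refer to (class names are taken from the hunks below; indexWriter/taxoWriter and the exact IndexWriter.addDocument overload are assumptions):

    FacetsConfig config = new FacetsConfig(taxoWriter);   // taxoWriter: a TaxonomyWriter
    config.setHierarchical("Publish Date", true);
    config.setMultiValued("Author", true);

    Document doc = new Document();
    doc.add(new FacetField("Author", "Lisa"));
    doc.add(new FacetField("Publish Date", "2010", "10", "15"));

    // FacetsConfig.build translates the FacetField instances into the underlying
    // index fields; assuming IndexWriter accepts the returned IndexDocument
    // directly (the benchmark task below copies the fields by hand instead).
    indexWriter.addDocument(config.build(doc));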

View File

@ -20,7 +20,8 @@ package org.apache.lucene.benchmark.byTask.feeds;
import java.io.IOException;
import java.util.List;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetsConfig;
/**
* Source items for facets.
@ -34,7 +35,9 @@ public abstract class FacetSource extends ContentItemsSource {
* account for multi-threading, as multiple threads can call this method
* simultaneously.
*/
public abstract void getNextFacets(List<FacetLabel> facets) throws NoMoreDataException, IOException;
public abstract void getNextFacets(List<FacetField> facets) throws NoMoreDataException, IOException;
public abstract void configure(FacetsConfig config);
@Override
public void resetInputs() throws IOException {

View File

@ -22,7 +22,8 @@ import java.util.List;
import java.util.Random;
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetsConfig;
/**
* Simple implementation of a random facet source
@ -31,6 +32,9 @@ import org.apache.lucene.facet.taxonomy.FacetLabel;
* <ul>
* <li><b>rand.seed</b> - defines the seed to initialize {@link Random} with
* (default: <b>13</b>).
* <li><b>max.doc.facets.dims</b> - Max number of random dimensions to
* create (default: <b>5</b>); actual number of dimensions
* would be anything between 1 and that number.
* <li><b>max.doc.facets</b> - maximal #facets per doc (default: <b>10</b>).
* Actual number of facets in a certain doc would be anything between 1 and that
* number.
@ -44,22 +48,33 @@ public class RandomFacetSource extends FacetSource {
private Random random;
private int maxDocFacets;
private int maxFacetDepth;
private int maxDims;
private int maxValue = maxDocFacets * maxFacetDepth;
@Override
public void getNextFacets(List<FacetLabel> facets) throws NoMoreDataException, IOException {
public void getNextFacets(List<FacetField> facets) throws NoMoreDataException, IOException {
facets.clear();
int numFacets = 1 + random.nextInt(maxDocFacets); // at least one facet to each doc
for (int i = 0; i < numFacets; i++) {
int depth = 1 + random.nextInt(maxFacetDepth); // depth 0 is not useful
String[] components = new String[depth];
for (int k = 0; k < depth; k++) {
String dim = Integer.toString(random.nextInt(maxDims));
String[] components = new String[depth-1];
for (int k = 0; k < depth-1; k++) {
components[k] = Integer.toString(random.nextInt(maxValue));
addItem();
}
FacetLabel cp = new FacetLabel(components);
facets.add(cp);
addBytes(cp.toString().length()); // very rough approximation
FacetField ff = new FacetField(dim, components);
facets.add(ff);
addBytes(ff.toString().length()); // very rough approximation
}
}
@Override
public void configure(FacetsConfig config) {
for(int i=0;i<maxDims;i++) {
config.setHierarchical(Integer.toString(i), true);
config.setMultiValued(Integer.toString(i), true);
}
}
@ -73,6 +88,7 @@ public class RandomFacetSource extends FacetSource {
super.setConfig(config);
random = new Random(config.get("rand.seed", 13));
maxDocFacets = config.get("max.doc.facets", 10);
maxDims = config.get("max.doc.facets.dims", 5);
maxFacetDepth = config.get("max.facet.depth", 3);
maxValue = maxDocFacets * maxFacetDepth;
}
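
The new FacetSource contract is: register dimensions on the supplied FacetsConfig in configure(), then emit FacetField instances from getNextFacets(). A minimal illustrative subclass (the class name and the "color" dimension are made up, not part of the benchmark):

    public class ColorFacetSource extends FacetSource {
      @Override
      public void configure(FacetsConfig config) {
        // declare the single dimension this source emits
        config.setMultiValued("color", true);
      }

      @Override
      public void getNextFacets(List<FacetField> facets) throws NoMoreDataException, IOException {
        facets.clear();
        facets.add(new FacetField("color", "red"));
      }

      @Override
      public void close() throws IOException {
        // nothing to release
      }
    }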

View File

@ -22,8 +22,14 @@ import java.util.List;
import org.apache.lucene.benchmark.byTask.PerfRunData;
import org.apache.lucene.benchmark.byTask.feeds.FacetSource;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.StorableField;
/**
* Add a faceted document.
@ -44,8 +50,8 @@ import org.apache.lucene.facet.taxonomy.FacetLabel;
*/
public class AddFacetedDocTask extends AddDocTask {
private final List<FacetLabel> facets = new ArrayList<FacetLabel>();
private FacetFields facetFields;
private final List<FacetField> facets = new ArrayList<FacetField>();
private FacetsConfig config;
public AddFacetedDocTask(PerfRunData runData) {
super(runData);
@ -54,19 +60,22 @@ public class AddFacetedDocTask extends AddDocTask {
@Override
public void setup() throws Exception {
super.setup();
if (facetFields == null) {
if (config == null) {
boolean withFacets = getRunData().getConfig().get("with.facets", true);
if (withFacets) {
// nocommit is this called once? are we adding same
// facets over and over!?
FacetSource facetsSource = getRunData().getFacetSource();
facetFields = withFacets ? new FacetFields(getRunData().getTaxonomyWriter()) : null;
config = new FacetsConfig(getRunData().getTaxonomyWriter());
facetsSource.getNextFacets(facets);
facetsSource.configure(config);
}
}
}
@Override
protected String getLogMessage(int recsCount) {
if (facetFields == null) {
if (config == null) {
return super.getLogMessage(recsCount);
}
return super.getLogMessage(recsCount)+ " with facets";
@ -74,10 +83,21 @@ public class AddFacetedDocTask extends AddDocTask {
@Override
public int doLogic() throws Exception {
if (facetFields != null) {
facetFields.addFields(doc, facets);
if (config != null) {
// nocommit hokey:
Document doc2 = new Document();
for(FacetField ff : facets) {
doc2.add(ff);
}
IndexDocument doc3 = config.build(doc2);
for(StorableField field : doc3.storableFields()) {
doc.add((Field) field);
}
for(IndexableField field : doc3.indexableFields()) {
doc.add((Field) field);
}
}
return super.doLogic();
}
}

View File

@ -23,14 +23,14 @@ import java.util.List;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.simple.Facets;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.FloatAssociationFacetField;
import org.apache.lucene.facet.simple.IntAssociationFacetField;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.simple.SimpleFacetsCollector;
import org.apache.lucene.facet.simple.TaxonomyFacetSumFloatAssociations;
import org.apache.lucene.facet.simple.TaxonomyFacetSumIntAssociations;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.FloatAssociationFacetField;
import org.apache.lucene.facet.IntAssociationFacetField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.TaxonomyFacetSumFloatAssociations;
import org.apache.lucene.facet.TaxonomyFacetSumIntAssociations;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
@ -98,13 +98,13 @@ public class AssociationsFacetsExample {
}
/** User runs a query and aggregates facets by summing their association values. */
private List<SimpleFacetResult> sumAssociations() throws IOException {
private List<FacetResult> sumAssociations() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
FacetsConfig config = getConfig(null);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -116,7 +116,7 @@ public class AssociationsFacetsExample {
Facets genre = new TaxonomyFacetSumFloatAssociations("$genre", taxoReader, config, sfc);
// Retrieve results
List<SimpleFacetResult> results = new ArrayList<SimpleFacetResult>();
List<FacetResult> results = new ArrayList<FacetResult>();
results.add(tags.getTopChildren(10, "tags"));
results.add(genre.getTopChildren(10, "genre"));
@ -127,7 +127,7 @@ public class AssociationsFacetsExample {
}
/** Runs summing association example. */
public List<SimpleFacetResult> runSumAssociations() throws IOException {
public List<FacetResult> runSumAssociations() throws IOException {
index();
return sumAssociations();
}
@ -136,7 +136,7 @@ public class AssociationsFacetsExample {
public static void main(String[] args) throws Exception {
System.out.println("Sum associations example:");
System.out.println("-------------------------");
List<SimpleFacetResult> results = new AssociationsFacetsExample().runSumAssociations();
List<FacetResult> results = new AssociationsFacetsExample().runSumAssociations();
System.out.println("tags: " + results.get(0));
System.out.println("genre: " + results.get(1));
}
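
The indexing side of this example is not in the hunk; a hedged sketch of what it would look like with the renamed association fields (the association values, the setIndexFieldName mapping, and the constructor argument order are assumptions):

    FacetsConfig config = new FacetsConfig(taxoWriter);
    config.setIndexFieldName("tags", "$tags");    // assumed mapping behind the "$tags"/"$genre"
    config.setIndexFieldName("genre", "$genre");  // index fields used in the search code above

    Document doc = new Document();
    doc.add(new IntAssociationFacetField(3, "tags", "lucene"));            // count-like association
    doc.add(new FloatAssociationFacetField(0.87f, "genre", "computing"));  // weight-like association
    indexWriter.addDocument(config.build(doc));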

View File

@ -13,12 +13,12 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.SimpleBindings;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.facet.simple.FacetField;
import org.apache.lucene.facet.simple.Facets;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.simple.SimpleFacetsCollector;
import org.apache.lucene.facet.simple.TaxonomyFacetSumValueSource;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.TaxonomyFacetSumValueSource;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -86,7 +86,7 @@ public class ExpressionAggregationFacetsExample {
}
/** User runs a query and aggregates facets. */
private SimpleFacetResult search() throws IOException, ParseException {
private FacetResult search() throws IOException, ParseException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
@ -100,7 +100,7 @@ public class ExpressionAggregationFacetsExample {
bindings.add(new SortField("popularity", SortField.Type.LONG)); // the value of the 'popularity' field
// Aggregates the facet values
SimpleFacetsCollector sfc = new SimpleFacetsCollector(true);
FacetsCollector sfc = new FacetsCollector(true);
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -110,7 +110,7 @@ public class ExpressionAggregationFacetsExample {
// Retrieve results
Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, expr.getValueSource(bindings));
SimpleFacetResult result = facets.getTopChildren(10, "A");
FacetResult result = facets.getTopChildren(10, "A");
indexReader.close();
taxoReader.close();
@ -119,7 +119,7 @@ public class ExpressionAggregationFacetsExample {
}
/** Runs the search example. */
public SimpleFacetResult runSearch() throws IOException, ParseException {
public FacetResult runSearch() throws IOException, ParseException {
index();
return search();
}
@ -128,8 +128,7 @@ public class ExpressionAggregationFacetsExample {
public static void main(String[] args) throws Exception {
System.out.println("Facet counting example:");
System.out.println("-----------------------");
SimpleFacetResult result = new ExpressionAggregationFacetsExample().runSearch();
FacetResult result = new ExpressionAggregationFacetsExample().runSearch();
System.out.println(result);
}
}
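
For orientation, the expression wiring above in one place (a sketch; the expression string and the "_score" binding are illustrative, only the "popularity" binding appears in this hunk):

    Expression expr = JavascriptCompiler.compile("_score * sqrt(popularity)");
    SimpleBindings bindings = new SimpleBindings();
    bindings.add(new SortField("_score", SortField.Type.SCORE));       // per-hit relevance score
    bindings.add(new SortField("popularity", SortField.Type.LONG));    // the stored popularity field

    FacetsCollector sfc = new FacetsCollector(true);   // keepScores=true so _score is available
    searcher.search(new MatchAllDocsQuery(), sfc);
    Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, expr.getValueSource(bindings));
    FacetResult result = facets.getTopChildren(10, "A");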

View File

@ -25,12 +25,12 @@ import java.util.Map;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.simple.FacetField;
import org.apache.lucene.facet.simple.Facets;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.simple.SimpleFacetsCollector;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
@ -103,13 +103,13 @@ public class MultiCategoryListsFacetsExample {
}
/** User runs a query and counts facets. */
private List<SimpleFacetResult> search() throws IOException {
private List<FacetResult> search() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
FacetsConfig config = getConfig(null);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -118,7 +118,7 @@ public class MultiCategoryListsFacetsExample {
searcher.search(new MatchAllDocsQuery(), sfc);
// Retrieve results
List<SimpleFacetResult> results = new ArrayList<SimpleFacetResult>();
List<FacetResult> results = new ArrayList<FacetResult>();
// Count both "Publish Date" and "Author" dimensions
Facets author = new FastTaxonomyFacetCounts("author", taxoReader, config, sfc);
@ -134,7 +134,7 @@ public class MultiCategoryListsFacetsExample {
}
/** Runs the search example. */
public List<SimpleFacetResult> runSearch() throws IOException {
public List<FacetResult> runSearch() throws IOException {
index();
return search();
}
@ -143,7 +143,7 @@ public class MultiCategoryListsFacetsExample {
public static void main(String[] args) throws Exception {
System.out.println("Facet counting over multiple category lists example:");
System.out.println("-----------------------");
List<SimpleFacetResult> results = new MultiCategoryListsFacetsExample().runSearch();
List<FacetResult> results = new MultiCategoryListsFacetsExample().runSearch();
System.out.println("Author: " + results.get(0));
System.out.println("Publish Date: " + results.get(1));
}

View File

@ -26,13 +26,13 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.facet.simple.Facets;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.LongRange;
import org.apache.lucene.facet.simple.RangeFacetCounts;
import org.apache.lucene.facet.simple.SimpleDrillDownQuery;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.simple.SimpleFacetsCollector;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.LongRange;
import org.apache.lucene.facet.RangeFacetCounts;
import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@ -86,10 +86,10 @@ public class RangeFacetsExample implements Closeable {
}
/** User runs a query and counts facets. */
public SimpleFacetResult search() throws IOException {
public FacetResult search() throws IOException {
// Aggregates the facet counts
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -109,7 +109,7 @@ public class RangeFacetsExample implements Closeable {
// Passing no baseQuery means we drill down on all
// documents ("browse only"):
SimpleDrillDownQuery q = new SimpleDrillDownQuery(getConfig());
DrillDownQuery q = new DrillDownQuery(getConfig());
// Use FieldCacheRangeFilter; this will use
// NumericDocValues:
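
A compact sketch of the counting path shown above (the "timestamp" field name and the LongRange constructor order label/min/minInclusive/max/maxInclusive are assumptions):

    FacetsCollector sfc = new FacetsCollector();
    searcher.search(new MatchAllDocsQuery(), sfc);

    long now = System.currentTimeMillis() / 1000L;
    Facets facets = new RangeFacetCounts("timestamp", sfc,
        new LongRange("Past hour", now - 3600L, true, now, true),
        new LongRange("Past six hours", now - 6L * 3600, true, now, true),
        new LongRange("Past day", now - 24L * 3600, true, now, true));
    FacetResult result = facets.getTopChildren(10, "timestamp");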

View File

@ -23,13 +23,13 @@ import java.util.List;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.simple.FacetField;
import org.apache.lucene.facet.simple.Facets;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.simple.SimpleDrillDownQuery;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.simple.SimpleFacetsCollector;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
@ -99,13 +99,13 @@ public class SimpleFacetsExample {
}
/** User runs a query and counts facets. */
private List<SimpleFacetResult> search() throws IOException {
private List<FacetResult> search() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
FacetsConfig config = getConfig(null);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -114,7 +114,7 @@ public class SimpleFacetsExample {
searcher.search(new MatchAllDocsQuery(), sfc);
// Retrieve results
List<SimpleFacetResult> results = new ArrayList<SimpleFacetResult>();
List<FacetResult> results = new ArrayList<FacetResult>();
// Count both "Publish Date" and "Author" dimensions
Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, sfc);
@ -128,7 +128,7 @@ public class SimpleFacetsExample {
}
/** User drills down on 'Publish Date/2010'. */
private SimpleFacetResult drillDown() throws IOException {
private FacetResult drillDown() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
@ -136,16 +136,16 @@ public class SimpleFacetsExample {
// Passing no baseQuery means we drill down on all
// documents ("browse only"):
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config);
DrillDownQuery q = new DrillDownQuery(config);
// Now user drills down on Publish Date/2010:
q.add("Publish Date", "2010");
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
searcher.search(q, sfc);
// Retrieve results
Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, sfc);
SimpleFacetResult result = facets.getTopChildren(10, "Author");
FacetResult result = facets.getTopChildren(10, "Author");
indexReader.close();
taxoReader.close();
@ -154,13 +154,13 @@ public class SimpleFacetsExample {
}
/** Runs the search example. */
public List<SimpleFacetResult> runSearch() throws IOException {
public List<FacetResult> runSearch() throws IOException {
index();
return search();
}
/** Runs the drill-down example. */
public SimpleFacetResult runDrillDown() throws IOException {
public FacetResult runDrillDown() throws IOException {
index();
return drillDown();
}
@ -170,7 +170,7 @@ public class SimpleFacetsExample {
System.out.println("Facet counting example:");
System.out.println("-----------------------");
SimpleFacetsExample example = new SimpleFacetsExample();
List<SimpleFacetResult> results = example.runSearch();
List<FacetResult> results = example.runSearch();
System.out.println("Author: " + results.get(0));
System.out.println("Publish Date: " + results.get(1));

View File

@ -23,14 +23,14 @@ import java.util.List;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.simple.Facets;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.SimpleDrillDownQuery;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.simple.SimpleFacetsCollector;
import org.apache.lucene.facet.simple.SortedSetDocValuesFacetCounts;
import org.apache.lucene.facet.simple.SortedSetDocValuesFacetField;
import org.apache.lucene.facet.simple.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.SortedSetDocValuesFacetCounts;
import org.apache.lucene.facet.SortedSetDocValuesFacetField;
import org.apache.lucene.facet.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
@ -89,14 +89,14 @@ public class SimpleSortedSetFacetsExample {
}
/** User runs a query and counts facets. */
private List<SimpleFacetResult> search() throws IOException {
private List<FacetResult> search() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(indexReader);
FacetsConfig config = getConfig();
// Aggregates the facet counts
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -107,7 +107,7 @@ public class SimpleSortedSetFacetsExample {
// Retrieve results
Facets facets = new SortedSetDocValuesFacetCounts(state, sfc);
List<SimpleFacetResult> results = new ArrayList<SimpleFacetResult>();
List<FacetResult> results = new ArrayList<FacetResult>();
results.add(facets.getTopChildren(10, "Author"));
results.add(facets.getTopChildren(10, "Publish Year"));
indexReader.close();
@ -116,34 +116,34 @@ public class SimpleSortedSetFacetsExample {
}
/** User drills down on 'Publish Year/2010'. */
private SimpleFacetResult drillDown() throws IOException {
private FacetResult drillDown() throws IOException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(indexReader);
FacetsConfig config = getConfig();
// Now user drills down on Publish Year/2010:
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config);
DrillDownQuery q = new DrillDownQuery(config);
q.add("Publish Year", "2010");
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
searcher.search(q, sfc);
// Retrieve results
Facets facets = new SortedSetDocValuesFacetCounts(state, sfc);
SimpleFacetResult result = facets.getTopChildren(10, "Author");
FacetResult result = facets.getTopChildren(10, "Author");
indexReader.close();
return result;
}
/** Runs the search example. */
public List<SimpleFacetResult> runSearch() throws IOException {
public List<FacetResult> runSearch() throws IOException {
index();
return search();
}
/** Runs the drill-down example. */
public SimpleFacetResult runDrillDown() throws IOException {
public FacetResult runDrillDown() throws IOException {
index();
return drillDown();
}
@ -153,7 +153,7 @@ public class SimpleSortedSetFacetsExample {
System.out.println("Facet counting example:");
System.out.println("-----------------------");
SimpleSortedSetFacetsExample example = new SimpleSortedSetFacetsExample();
List<SimpleFacetResult> results = example.runSearch();
List<FacetResult> results = example.runSearch();
System.out.println("Author: " + results.get(0));
System.out.println("Publish Year: " + results.get(0));

View File

@ -19,7 +19,7 @@ package org.apache.lucene.demo.facet;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@ -27,7 +27,7 @@ public class TestAssociationsFacetsExample extends LuceneTestCase {
@Test
public void testExamples() throws Exception {
List<SimpleFacetResult> res = new AssociationsFacetsExample().runSumAssociations();
List<FacetResult> res = new AssociationsFacetsExample().runSumAssociations();
assertEquals("Wrong number of results", 2, res.size());
assertEquals("value=6 childCount=2\n lucene (4)\n solr (2)\n", res.get(0).toString());
assertEquals("value=1.96 childCount=2\n computing (1.62)\n software (0.34)\n", res.get(1).toString());

View File

@ -20,7 +20,7 @@ package org.apache.lucene.demo.facet;
import java.util.List;
import java.util.Locale;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@ -28,7 +28,7 @@ public class TestExpressionAggregationFacetsExample extends LuceneTestCase {
@Test
public void testSimple() throws Exception {
SimpleFacetResult result = new ExpressionAggregationFacetsExample().runSearch();
FacetResult result = new ExpressionAggregationFacetsExample().runSearch();
assertEquals("value=3.9681187 childCount=2\n B (2.236068)\n C (1.7320508)\n", result.toString());
}
}

View File

@ -19,7 +19,7 @@ package org.apache.lucene.demo.facet;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@ -27,7 +27,7 @@ public class TestMultiCategoryListsFacetsExample extends LuceneTestCase {
@Test
public void testExample() throws Exception {
List<SimpleFacetResult> results = new MultiCategoryListsFacetsExample().runSearch();
List<FacetResult> results = new MultiCategoryListsFacetsExample().runSearch();
assertEquals(2, results.size());
assertEquals("value=5 childCount=4\n Lisa (2)\n Bob (1)\n Susan (1)\n Frank (1)\n", results.get(0).toString());
assertEquals("value=5 childCount=3\n 2010 (2)\n 2012 (2)\n 1999 (1)\n", results.get(1).toString());

View File

@ -19,7 +19,7 @@ package org.apache.lucene.demo.facet;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
@ -32,7 +32,7 @@ public class TestRangeFacetsExample extends LuceneTestCase {
public void testSimple() throws Exception {
RangeFacetsExample example = new RangeFacetsExample();
example.index();
SimpleFacetResult result = example.search();
FacetResult result = example.search();
assertEquals("value=100 childCount=3\n Past hour (4)\n Past six hours (22)\n Past day (87)\n", result.toString());
example.close();
}

View File

@ -19,7 +19,7 @@ package org.apache.lucene.demo.facet;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@ -28,7 +28,7 @@ public class TestSimpleFacetsExample extends LuceneTestCase {
@Test
public void testSimple() throws Exception {
List<SimpleFacetResult> results = new SimpleFacetsExample().runSearch();
List<FacetResult> results = new SimpleFacetsExample().runSearch();
assertEquals(2, results.size());
assertEquals("value=5 childCount=4\n Lisa (2)\n Bob (1)\n Susan (1)\n Frank (1)\n", results.get(0).toString());
assertEquals("value=5 childCount=3\n 2010 (2)\n 2012 (2)\n 1999 (1)\n", results.get(1).toString());
@ -36,7 +36,7 @@ public class TestSimpleFacetsExample extends LuceneTestCase {
@Test
public void testDrillDown() throws Exception {
SimpleFacetResult result = new SimpleFacetsExample().runDrillDown();
FacetResult result = new SimpleFacetsExample().runDrillDown();
assertEquals("value=2 childCount=2\n Bob (1)\n Lisa (1)\n", result.toString());
}
}

View File

@ -19,7 +19,7 @@ package org.apache.lucene.demo.facet;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetResult;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
@ -32,7 +32,7 @@ public class TestSimpleSortedSetFacetsExample extends LuceneTestCase {
@Test
public void testSimple() throws Exception {
List<SimpleFacetResult> results = new SimpleSortedSetFacetsExample().runSearch();
List<FacetResult> results = new SimpleSortedSetFacetsExample().runSearch();
assertEquals(2, results.size());
assertEquals("value=5 childCount=4\n Lisa (2)\n Bob (1)\n Frank (1)\n Susan (1)\n", results.get(0).toString());
assertEquals("value=5 childCount=3\n 2010 (2)\n 2012 (2)\n 1999 (1)\n", results.get(1).toString());
@ -40,7 +40,7 @@ public class TestSimpleSortedSetFacetsExample extends LuceneTestCase {
@Test
public void testDrillDown() throws Exception {
SimpleFacetResult result = new SimpleSortedSetFacetsExample().runDrillDown();
FacetResult result = new SimpleSortedSetFacetsExample().runDrillDown();
assertEquals("value=2 childCount=2\n Bob (1)\n Lisa (1)\n", result.toString());
}
}

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -49,7 +49,7 @@ import org.apache.lucene.search.TermQuery;
*
* @lucene.experimental
*/
public final class SimpleDrillDownQuery extends Query {
public final class DrillDownQuery extends Query {
public static Term term(String field, String dim, String... path) {
return new Term(field, FacetsConfig.pathToString(dim, path));
@ -60,14 +60,14 @@ public final class SimpleDrillDownQuery extends Query {
private final Map<String,Integer> drillDownDims = new LinkedHashMap<String,Integer>();
/** Used by clone() */
SimpleDrillDownQuery(FacetsConfig config, BooleanQuery query, Map<String,Integer> drillDownDims) {
DrillDownQuery(FacetsConfig config, BooleanQuery query, Map<String,Integer> drillDownDims) {
this.query = query.clone();
this.drillDownDims.putAll(drillDownDims);
this.config = config;
}
/** Used by DrillSideways */
SimpleDrillDownQuery(FacetsConfig config, Filter filter, SimpleDrillDownQuery other) {
DrillDownQuery(FacetsConfig config, Filter filter, DrillDownQuery other) {
query = new BooleanQuery(true); // disable coord
BooleanClause[] clauses = other.query.getClauses();
@ -84,7 +84,7 @@ public final class SimpleDrillDownQuery extends Query {
}
/** Used by DrillSideways */
SimpleDrillDownQuery(FacetsConfig config, Query baseQuery, List<Query> clauses, Map<String,Integer> drillDownDims) {
DrillDownQuery(FacetsConfig config, Query baseQuery, List<Query> clauses, Map<String,Integer> drillDownDims) {
this.query = new BooleanQuery(true);
if (baseQuery != null) {
query.add(baseQuery, Occur.MUST);
@ -97,21 +97,21 @@ public final class SimpleDrillDownQuery extends Query {
}
/**
* Creates a new {@code SimpleDrillDownQuery} without a base query,
* Creates a new {@code DrillDownQuery} without a base query,
* to perform a pure browsing query (equivalent to using
* {@link MatchAllDocsQuery} as base).
*/
public SimpleDrillDownQuery(FacetsConfig config) {
public DrillDownQuery(FacetsConfig config) {
this(config, null);
}
/**
* Creates a new {@code SimpleDrillDownQuery} over the given base query. Can be
* Creates a new {@code DrillDownQuery} over the given base query. Can be
* {@code null}, in which case the result {@link Query} from
* {@link #rewrite(IndexReader)} will be a pure browsing query, filtering on
* the added categories only.
*/
public SimpleDrillDownQuery(FacetsConfig config, Query baseQuery) {
public DrillDownQuery(FacetsConfig config, Query baseQuery) {
query = new BooleanQuery(true); // disable coord
if (baseQuery != null) {
query.add(baseQuery, Occur.MUST);
@ -181,8 +181,8 @@ public final class SimpleDrillDownQuery extends Query {
}
@Override
public SimpleDrillDownQuery clone() {
return new SimpleDrillDownQuery(config, query, drillDownDims);
public DrillDownQuery clone() {
return new DrillDownQuery(config, query, drillDownDims);
}
@Override
@ -194,11 +194,11 @@ public final class SimpleDrillDownQuery extends Query {
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SimpleDrillDownQuery)) {
if (!(obj instanceof DrillDownQuery)) {
return false;
}
SimpleDrillDownQuery other = (SimpleDrillDownQuery) obj;
DrillDownQuery other = (DrillDownQuery) obj;
return query.equals(other.query) && super.equals(other);
}

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -66,7 +66,7 @@ import org.apache.lucene.search.Weight;
* @lucene.experimental
*/
public class SimpleDrillSideways {
public class DrillSideways {
protected final IndexSearcher searcher;
protected final TaxonomyReader taxoReader;
@ -77,7 +77,7 @@ public class SimpleDrillSideways {
* Create a new {@code DrillSideways} instance, assuming the categories were
* indexed with {@link FacetFields}.
*/
public SimpleDrillSideways(IndexSearcher searcher, FacetsConfig config, TaxonomyReader taxoReader) {
public DrillSideways(IndexSearcher searcher, FacetsConfig config, TaxonomyReader taxoReader) {
this(searcher, config, taxoReader, null);
}
@ -85,7 +85,7 @@ public class SimpleDrillSideways {
* Create a new {@code DrillSideways} instance, assuming the categories were
* indexed with {@link SortedSetDocValuesFacetFields}.
*/
public SimpleDrillSideways(IndexSearcher searcher, FacetsConfig config, SortedSetDocValuesReaderState state) {
public DrillSideways(IndexSearcher searcher, FacetsConfig config, SortedSetDocValuesReaderState state) {
this(searcher, config, null, state);
}
@ -94,7 +94,7 @@ public class SimpleDrillSideways {
* dimensions are sorted set facets and others are
* taxonomy facets.
*/
public SimpleDrillSideways(IndexSearcher searcher, FacetsConfig config, TaxonomyReader taxoReader, SortedSetDocValuesReaderState state) {
public DrillSideways(IndexSearcher searcher, FacetsConfig config, TaxonomyReader taxoReader, SortedSetDocValuesReaderState state) {
this.searcher = searcher;
this.config = config;
this.taxoReader = taxoReader;
@ -103,7 +103,7 @@ public class SimpleDrillSideways {
/** Subclass can override to customize per-dim Facets
* impl. */
protected Facets buildFacetsResult(SimpleFacetsCollector drillDowns, SimpleFacetsCollector[] drillSideways, String[] drillSidewaysDims) throws IOException {
protected Facets buildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, String[] drillSidewaysDims) throws IOException {
Facets drillDownFacets;
Map<String,Facets> drillSidewaysFacets = new HashMap<String,Facets>();
@ -138,17 +138,17 @@ public class SimpleDrillSideways {
* computing drill down and sideways counts.
*/
@SuppressWarnings({"rawtypes","unchecked"})
public SimpleDrillSidewaysResult search(SimpleDrillDownQuery query, Collector hitCollector) throws IOException {
public DrillSidewaysResult search(DrillDownQuery query, Collector hitCollector) throws IOException {
Map<String,Integer> drillDownDims = query.getDims();
SimpleFacetsCollector drillDownCollector = new SimpleFacetsCollector();
FacetsCollector drillDownCollector = new FacetsCollector();
if (drillDownDims.isEmpty()) {
// There are no drill-down dims, so there is no
// drill-sideways to compute:
searcher.search(query, MultiCollector.wrap(hitCollector, drillDownCollector));
return new SimpleDrillSidewaysResult(buildFacetsResult(drillDownCollector, null, null), null);
return new DrillSidewaysResult(buildFacetsResult(drillDownCollector, null, null), null);
}
BooleanQuery ddq = query.getBooleanQuery();
@ -167,11 +167,11 @@ public class SimpleDrillSideways {
startClause = 1;
}
SimpleFacetsCollector[] drillSidewaysCollectors = new SimpleFacetsCollector[drillDownDims.size()];
FacetsCollector[] drillSidewaysCollectors = new FacetsCollector[drillDownDims.size()];
int idx = 0;
for(String dim : drillDownDims.keySet()) {
drillSidewaysCollectors[idx++] = new SimpleFacetsCollector();
drillSidewaysCollectors[idx++] = new FacetsCollector();
}
boolean useCollectorMethod = scoreSubDocsAtOnce();
@ -220,24 +220,24 @@ public class SimpleDrillSideways {
// continue to run "optimized"
collectorMethod(query, baseQuery, startClause, hitCollector, drillDownCollector, drillSidewaysCollectors);
} else {
SimpleDrillSidewaysQuery dsq = new SimpleDrillSidewaysQuery(baseQuery, drillDownCollector, drillSidewaysCollectors, drillDownTerms);
DrillSidewaysQuery dsq = new DrillSidewaysQuery(baseQuery, drillDownCollector, drillSidewaysCollectors, drillDownTerms);
searcher.search(dsq, hitCollector);
}
return new SimpleDrillSidewaysResult(buildFacetsResult(drillDownCollector, drillSidewaysCollectors, drillDownDims.keySet().toArray(new String[drillDownDims.size()])), null);
return new DrillSidewaysResult(buildFacetsResult(drillDownCollector, drillSidewaysCollectors, drillDownDims.keySet().toArray(new String[drillDownDims.size()])), null);
}
/** Uses the more general but slower method of sideways
* counting. This method allows an arbitrary subQuery to
* implement the drill down for a given dimension. */
private void collectorMethod(SimpleDrillDownQuery ddq, Query baseQuery, int startClause, Collector hitCollector, Collector drillDownCollector, Collector[] drillSidewaysCollectors) throws IOException {
private void collectorMethod(DrillDownQuery ddq, Query baseQuery, int startClause, Collector hitCollector, Collector drillDownCollector, Collector[] drillSidewaysCollectors) throws IOException {
BooleanClause[] clauses = ddq.getBooleanQuery().getClauses();
Map<String,Integer> drillDownDims = ddq.getDims();
BooleanQuery topQuery = new BooleanQuery(true);
final SimpleDrillSidewaysCollector collector = new SimpleDrillSidewaysCollector(hitCollector, drillDownCollector, drillSidewaysCollectors,
final DrillSidewaysCollector collector = new DrillSidewaysCollector(hitCollector, drillDownCollector, drillSidewaysCollectors,
drillDownDims);
// TODO: if query is already a BQ we could copy that and
@ -318,11 +318,11 @@ public class SimpleDrillSideways {
* Search, sorting by {@link Sort}, and computing
* drill down and sideways counts.
*/
public SimpleDrillSidewaysResult search(SimpleDrillDownQuery query,
public DrillSidewaysResult search(DrillDownQuery query,
Filter filter, FieldDoc after, int topN, Sort sort, boolean doDocScores,
boolean doMaxScore) throws IOException {
if (filter != null) {
query = new SimpleDrillDownQuery(config, filter, query);
query = new DrillDownQuery(config, filter, query);
}
if (sort != null) {
int limit = searcher.getIndexReader().maxDoc();
@ -337,8 +337,8 @@ public class SimpleDrillSideways {
doDocScores,
doMaxScore,
true);
SimpleDrillSidewaysResult r = search(query, hitCollector);
return new SimpleDrillSidewaysResult(r.facets, hitCollector.topDocs());
DrillSidewaysResult r = search(query, hitCollector);
return new DrillSidewaysResult(r.facets, hitCollector.topDocs());
} else {
return search(after, query, topN);
}
@ -348,7 +348,7 @@ public class SimpleDrillSideways {
* Search, sorting by score, and computing
* drill down and sideways counts.
*/
public SimpleDrillSidewaysResult search(SimpleDrillDownQuery query, int topN) throws IOException {
public DrillSidewaysResult search(DrillDownQuery query, int topN) throws IOException {
return search(null, query, topN);
}
@ -356,16 +356,16 @@ public class SimpleDrillSideways {
* Search, sorting by score, and computing
* drill down and sideways counts.
*/
public SimpleDrillSidewaysResult search(ScoreDoc after,
SimpleDrillDownQuery query, int topN) throws IOException {
public DrillSidewaysResult search(ScoreDoc after,
DrillDownQuery query, int topN) throws IOException {
int limit = searcher.getIndexReader().maxDoc();
if (limit == 0) {
limit = 1; // the collector does not allow numHits = 0
}
topN = Math.min(topN, limit);
TopScoreDocCollector hitCollector = TopScoreDocCollector.create(topN, after, true);
SimpleDrillSidewaysResult r = search(query, hitCollector);
return new SimpleDrillSidewaysResult(r.facets, hitCollector.topDocs());
DrillSidewaysResult r = search(query, hitCollector);
return new DrillSidewaysResult(r.facets, hitCollector.topDocs());
}
/** Override this and return true if your collector
@ -381,14 +381,14 @@ public class SimpleDrillSideways {
return false;
}
public static class SimpleDrillSidewaysResult {
public static class DrillSidewaysResult {
/** Combined drill down & sideways results. */
public final Facets facets;
/** Hits. */
public final TopDocs hits;
public SimpleDrillSidewaysResult(Facets facets, TopDocs hits) {
public DrillSidewaysResult(Facets facets, TopDocs hits) {
this.facets = facets;
this.hits = hits;
}
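
Putting the pieces above together, a drill-sideways search over one drill-down dimension (searcher, config, and taxoReader are assumed to be set up as in the demo examples):

    DrillDownQuery q = new DrillDownQuery(config, new MatchAllDocsQuery());
    q.add("Publish Date", "2010");

    DrillSideways ds = new DrillSideways(searcher, config, taxoReader);
    DrillSideways.DrillSidewaysResult r = ds.search(q, 10);

    // r.hits holds the top documents for the drill-down query; r.facets counts
    // "Publish Date" as if that drill-down had not been applied (sideways counts).
    FacetResult dates = r.facets.getTopChildren(10, "Publish Date");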

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -32,7 +32,7 @@ import org.apache.lucene.search.Weight;
* passed all constraints (a true hit) or if it missed
* exactly one dimension (a near-miss, to count for
* drill-sideways counts on that dimension). */
class SimpleDrillSidewaysCollector extends Collector {
class DrillSidewaysCollector extends Collector {
private final Collector hitCollector;
private final Collector drillDownCollector;
@ -48,7 +48,7 @@ class SimpleDrillSidewaysCollector extends Collector {
private Scorer mainScorer;
public SimpleDrillSidewaysCollector(Collector hitCollector, Collector drillDownCollector, Collector[] drillSidewaysCollectors,
public DrillSidewaysCollector(Collector hitCollector, Collector drillDownCollector, Collector[] drillSidewaysCollectors,
Map<String,Integer> dims) {
this.hitCollector = hitCollector;
this.drillDownCollector = drillDownCollector;

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -37,13 +37,13 @@ import org.apache.lucene.util.Bits;
/** Only purpose is to punch through and return a
* SimpleDrillSidewaysScorer */
class SimpleDrillSidewaysQuery extends Query {
class DrillSidewaysQuery extends Query {
final Query baseQuery;
final Collector drillDownCollector;
final Collector[] drillSidewaysCollectors;
final Term[][] drillDownTerms;
SimpleDrillSidewaysQuery(Query baseQuery, Collector drillDownCollector, Collector[] drillSidewaysCollectors, Term[][] drillDownTerms) {
DrillSidewaysQuery(Query baseQuery, Collector drillDownCollector, Collector[] drillSidewaysCollectors, Term[][] drillDownTerms) {
this.baseQuery = baseQuery;
this.drillDownCollector = drillDownCollector;
this.drillSidewaysCollectors = drillSidewaysCollectors;
@ -68,7 +68,7 @@ class SimpleDrillSidewaysQuery extends Query {
if (newQuery == baseQuery) {
return this;
} else {
return new SimpleDrillSidewaysQuery(newQuery, drillDownCollector, drillSidewaysCollectors, drillDownTerms);
return new DrillSidewaysQuery(newQuery, drillDownCollector, drillSidewaysCollectors, drillDownTerms);
}
}
@ -108,12 +108,12 @@ class SimpleDrillSidewaysQuery extends Query {
public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder,
boolean topScorer, Bits acceptDocs) throws IOException {
SimpleDrillSidewaysScorer.DocsEnumsAndFreq[] dims = new SimpleDrillSidewaysScorer.DocsEnumsAndFreq[drillDownTerms.length];
DrillSidewaysScorer.DocsEnumsAndFreq[] dims = new DrillSidewaysScorer.DocsEnumsAndFreq[drillDownTerms.length];
TermsEnum termsEnum = null;
String lastField = null;
int nullCount = 0;
for(int dim=0;dim<dims.length;dim++) {
dims[dim] = new SimpleDrillSidewaysScorer.DocsEnumsAndFreq();
dims[dim] = new DrillSidewaysScorer.DocsEnumsAndFreq();
dims[dim].sidewaysCollector = drillSidewaysCollectors[dim];
String field = drillDownTerms[dim][0].field();
dims[dim].dim = drillDownTerms[dim][0].text();
@ -158,7 +158,7 @@ class SimpleDrillSidewaysQuery extends Query {
return null;
}
return new SimpleDrillSidewaysScorer(this, context,
return new DrillSidewaysScorer(this, context,
baseScorer,
drillDownCollector, dims);
}
@ -184,7 +184,7 @@ class SimpleDrillSidewaysQuery extends Query {
if (this == obj) return true;
if (!super.equals(obj)) return false;
if (getClass() != obj.getClass()) return false;
SimpleDrillSidewaysQuery other = (SimpleDrillSidewaysQuery) obj;
DrillSidewaysQuery other = (DrillSidewaysQuery) obj;
if (baseQuery == null) {
if (other.baseQuery != null) return false;
} else if (!baseQuery.equals(other.baseQuery)) return false;

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -28,7 +28,7 @@ import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.FixedBitSet;
class SimpleDrillSidewaysScorer extends Scorer {
class DrillSidewaysScorer extends Scorer {
//private static boolean DEBUG = false;
@ -47,7 +47,7 @@ class SimpleDrillSidewaysScorer extends Scorer {
private int collectDocID = -1;
private float collectScore;
SimpleDrillSidewaysScorer(Weight w, AtomicReaderContext context, Scorer baseScorer, Collector drillDownCollector,
DrillSidewaysScorer(Weight w, AtomicReaderContext context, Scorer baseScorer, Collector drillDownCollector,
DocsEnumsAndFreq[] dims) {
super(w);
this.dims = dims;

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -20,7 +20,7 @@ package org.apache.lucene.facet.simple;
import java.util.Arrays;
import java.util.List;
public final class SimpleFacetResult {
public final class FacetResult {
/** Total value for this path (sum of all child counts, or
* sum of all child values), even those not included in
@ -34,7 +34,7 @@ public final class SimpleFacetResult {
/** Child counts. */
public final LabelAndValue[] labelValues;
public SimpleFacetResult(Number value, LabelAndValue[] labelValues, int childCount) {
public FacetResult(Number value, LabelAndValue[] labelValues, int childCount) {
this.value = value;
this.labelValues = labelValues;
this.childCount = childCount;
@ -56,10 +56,10 @@ public final class SimpleFacetResult {
@Override
public boolean equals(Object _other) {
if ((_other instanceof SimpleFacetResult) == false) {
if ((_other instanceof FacetResult) == false) {
return false;
}
SimpleFacetResult other = (SimpleFacetResult) _other;
FacetResult other = (FacetResult) _other;
return value.equals(other.value) &&
childCount == other.childCount &&
Arrays.equals(labelValues, other.labelValues);
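
Consuming a FacetResult obtained from some Facets implementation (sketch; LabelAndValue is assumed to expose public label/value fields, matching how the counting classes below construct it):

    FacetResult result = facets.getTopChildren(10, "Author");
    System.out.println("value=" + result.value + " childCount=" + result.childCount);
    for (LabelAndValue lv : result.labelValues) {
      System.out.println("  " + lv.label + " (" + lv.value + ")");
    }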

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -35,7 +35,7 @@ public abstract class Facets {
/** Returns the topN child labels under the specified
* path. Returns null if the specified path doesn't
* exist. */
public abstract SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException;
public abstract FacetResult getTopChildren(int topN, String dim, String... path) throws IOException;
/** Return the count for a specific path. Returns -1 if
* this path doesn't exist, else the count. */
@ -46,15 +46,15 @@ public abstract class Facets {
* this is used for "sparse" faceting, where many
* different dimensions were indexed depending on the
* type of document. */
public abstract List<SimpleFacetResult> getAllDims(int topN) throws IOException;
public abstract List<FacetResult> getAllDims(int topN) throws IOException;
// nocommit where to move?
/** Utility method, to search for top hits by score
* ({@link IndexSearcher#search(Query,int)}), but
* also collect results into a {@link
* SimpleFacetsCollector} for faceting. */
public static TopDocs search(IndexSearcher searcher, Query q, int topN, SimpleFacetsCollector sfc) throws IOException {
* FacetsCollector} for faceting. */
public static TopDocs search(IndexSearcher searcher, Query q, int topN, FacetsCollector sfc) throws IOException {
// nocommit can we pass the "right" boolean for
// in-order...? we'd need access to the protected
// IS.search methods taking Weight... could use
@ -69,8 +69,8 @@ public abstract class Facets {
/** Utility method, to search for top hits by score with a filter
* ({@link IndexSearcher#search(Query,Filter,int)}), but
* also collect results into a {@link
* SimpleFacetsCollector} for faceting. */
public static TopDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, SimpleFacetsCollector sfc) throws IOException {
* FacetsCollector} for faceting. */
public static TopDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, FacetsCollector sfc) throws IOException {
if (filter != null) {
q = new FilteredQuery(q, filter);
}
@ -83,8 +83,8 @@ public abstract class Facets {
* {@link Sort} with a filter
* ({@link IndexSearcher#search(Query,Filter,int,Sort)}), but
* also collect results into a {@link
* SimpleFacetsCollector} for faceting. */
public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, SimpleFacetsCollector sfc) throws IOException {
* FacetsCollector} for faceting. */
public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, FacetsCollector sfc) throws IOException {
return search(searcher, q, filter, topN, sort, false, false, sfc);
}
@ -94,8 +94,8 @@ public abstract class Facets {
* {@link Sort} with a filter
* ({@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)}), but
* also collect results into a {@link
* SimpleFacetsCollector} for faceting. */
public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, boolean doDocScores, boolean doMaxScore, SimpleFacetsCollector sfc) throws IOException {
* FacetsCollector} for faceting. */
public static TopFieldDocs search(IndexSearcher searcher, Query q, Filter filter, int topN, Sort sort, boolean doDocScores, boolean doMaxScore, FacetsCollector sfc) throws IOException {
int limit = searcher.getIndexReader().maxDoc();
if (limit == 0) {
limit = 1;
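
The static helpers collect hits and facets in a single pass; a sketch of the simplest overload (searcher, query, taxoReader, and config assumed):

    FacetsCollector sfc = new FacetsCollector();
    TopDocs hits = Facets.search(searcher, query, 10, sfc);

    Facets counts = new FastTaxonomyFacetCounts(taxoReader, config, sfc);
    FacetResult authors = counts.getTopChildren(10, "Author");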

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -31,7 +31,7 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.FixedBitSet;
// nocommit javadocs
public final class SimpleFacetsCollector extends Collector {
public final class FacetsCollector extends Collector {
private AtomicReaderContext context;
private Scorer scorer;
@ -60,11 +60,11 @@ public final class SimpleFacetsCollector extends Collector {
}
}
public SimpleFacetsCollector() {
public FacetsCollector() {
this(false);
}
public SimpleFacetsCollector(boolean keepScores) {
public FacetsCollector(boolean keepScores) {
this.keepScores = keepScores;
}

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.BinaryDocValues;
@ -32,11 +32,11 @@ import org.apache.lucene.util.FixedBitSet;
public class FastTaxonomyFacetCounts extends TaxonomyFacets {
private final int[] counts;
public FastTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector fc) throws IOException {
public FastTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
this(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, taxoReader, config, fc);
}
public FastTaxonomyFacetCounts(String indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector fc) throws IOException {
public FastTaxonomyFacetCounts(String indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
super(indexFieldName, taxoReader, config);
counts = new int[taxoReader.getSize()];
count(fc.getMatchingDocs());
@ -119,7 +119,7 @@ public class FastTaxonomyFacetCounts extends TaxonomyFacets {
}
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
throw new IllegalArgumentException("topN must be > 0 (got: " + topN + ")");
}
@ -185,6 +185,6 @@ public class FastTaxonomyFacetCounts extends TaxonomyFacets {
labelValues[i] = new LabelAndValue(child.components[cp.length], ordAndValue.value);
}
return new SimpleFacetResult(totCount, labelValues, childCount);
return new FacetResult(totCount, labelValues, childCount);
}
}

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -36,7 +36,7 @@ public class MultiFacets extends Facets {
this.defaultFacets = defaultFacets;
}
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
Facets facets = dimToFacets.get(dim);
if (facets == null) {
if (defaultFacets == null) {
@ -58,7 +58,7 @@ public class MultiFacets extends Facets {
return facets.getSpecificValue(dim, path);
}
public List<SimpleFacetResult> getAllDims(int topN) throws IOException {
public List<FacetResult> getAllDims(int topN) throws IOException {
// nocommit can/should we impl this? ie, sparse
// faceting after drill sideways
throw new UnsupportedOperationException();

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
@ -36,11 +36,11 @@ public class RangeFacetCounts extends Facets {
private final String field;
private int totCount;
public RangeFacetCounts(String field, SimpleFacetsCollector hits, Range... ranges) throws IOException {
public RangeFacetCounts(String field, FacetsCollector hits, Range... ranges) throws IOException {
this(field, new LongFieldSource(field), hits, ranges);
}
public RangeFacetCounts(String field, ValueSource valueSource, SimpleFacetsCollector hits, Range... ranges) throws IOException {
public RangeFacetCounts(String field, ValueSource valueSource, FacetsCollector hits, Range... ranges) throws IOException {
this.ranges = ranges;
this.field = field;
counts = new int[ranges.length];
@ -84,7 +84,7 @@ public class RangeFacetCounts extends Facets {
// very well:
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) {
public FacetResult getTopChildren(int topN, String dim, String... path) {
if (dim.equals(field) == false) {
throw new IllegalArgumentException("invalid dim \"" + dim + "\"; should be \"" + field + "\"");
}
@ -94,7 +94,7 @@ public class RangeFacetCounts extends Facets {
labelValues[i] = new LabelAndValue(ranges[i].label, counts[i]);
}
return new SimpleFacetResult(totCount, labelValues, labelValues.length);
return new FacetResult(totCount, labelValues, labelValues.length);
}
@Override
@ -104,7 +104,7 @@ public class RangeFacetCounts extends Facets {
}
@Override
public List<SimpleFacetResult> getAllDims(int topN) throws IOException {
public List<FacetResult> getAllDims(int topN) throws IOException {
return Collections.singletonList(getTopChildren(topN, null));
}
}
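The same collector feeds the range implementation; a rough sketch matching the TestRangeFacets usage further down (the "field" name and range labels are taken from that test):

FacetsCollector fc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), fc);

// Count matching docs per numeric range over the "field" values:
Facets facets = new RangeFacetCounts("field", fc,
    new LongRange("less than 10", 0L, true, 10L, false),
    new LongRange("90 or above", 90L, true, 100L, false),
    new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true));

FacetResult result = facets.getTopChildren(10, "field");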

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -26,9 +26,9 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.simple.SortedSetDocValuesReaderState.OrdRange;
import org.apache.lucene.facet.simple.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.SortedSetDocValuesReaderState.OrdRange;
import org.apache.lucene.facet.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.IndexReader;
@ -61,7 +61,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
/** Sparse faceting: returns any dimension that had any
* hits, topCount labels per dimension. */
public SortedSetDocValuesFacetCounts(SortedSetDocValuesReaderState state, SimpleFacetsCollector hits)
public SortedSetDocValuesFacetCounts(SortedSetDocValuesReaderState state, FacetsCollector hits)
throws IOException {
this.state = state;
this.field = state.getField();
@ -72,7 +72,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
}
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
throw new IllegalArgumentException("topN must be > 0 (got: " + topN + ")");
}
@ -86,7 +86,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
return getDim(dim, ordRange, topN);
}
private final SimpleFacetResult getDim(String dim, OrdRange ordRange, int topN) {
private final FacetResult getDim(String dim, OrdRange ordRange, int topN) {
TopOrdAndIntQueue q = null;
@ -135,7 +135,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
labelValues[i] = new LabelAndValue(parts[1], ordAndValue.value);
}
return new SimpleFacetResult(dimCount, labelValues, childCount);
return new FacetResult(dimCount, labelValues, childCount);
}
/** Does all the "real work" of tallying up the counts. */
@ -266,11 +266,11 @@ public class SortedSetDocValuesFacetCounts extends Facets {
}
@Override
public List<SimpleFacetResult> getAllDims(int topN) throws IOException {
public List<FacetResult> getAllDims(int topN) throws IOException {
List<SimpleFacetResult> results = new ArrayList<SimpleFacetResult>();
List<FacetResult> results = new ArrayList<FacetResult>();
for(Map.Entry<String,OrdRange> ent : state.getPrefixToOrdRange().entrySet()) {
SimpleFacetResult fr = getDim(ent.getKey(), ent.getValue(), topN);
FacetResult fr = getDim(ent.getKey(), ent.getValue(), topN);
if (fr != null) {
results.add(fr);
}
@ -278,9 +278,9 @@ public class SortedSetDocValuesFacetCounts extends Facets {
// Sort by highest count:
Collections.sort(results,
new Comparator<SimpleFacetResult>() {
new Comparator<FacetResult>() {
@Override
public int compare(SimpleFacetResult a, SimpleFacetResult b) {
public int compare(FacetResult a, FacetResult b) {
if (a.value.intValue() > b.value.intValue()) {
return -1;
} else if (b.value.intValue() > a.value.intValue()) {

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.BinaryDocValues;
@ -37,7 +37,7 @@ public class TaxonomyFacetCounts extends TaxonomyFacets {
private final OrdinalsReader ordinalsReader;
private final int[] counts;
public TaxonomyFacetCounts(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector fc) throws IOException {
public TaxonomyFacetCounts(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
super(ordinalsReader.getIndexFieldName(), taxoReader, config);
this.ordinalsReader = ordinalsReader;
counts = new int[taxoReader.getSize()];
@ -102,7 +102,7 @@ public class TaxonomyFacetCounts extends TaxonomyFacets {
}
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
throw new IllegalArgumentException("topN must be > 0 (got: " + topN + ")");
}
@ -165,6 +165,6 @@ public class TaxonomyFacetCounts extends TaxonomyFacets {
labelValues[i] = new LabelAndValue(child.components[cp.length], ordAndValue.value);
}
return new SimpleFacetResult(totCount, labelValues, childCount);
return new FacetResult(totCount, labelValues, childCount);
}
}
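Unlike FastTaxonomyFacetCounts, this variant decodes ordinals through an explicit OrdinalsReader; a sketch under the same assumptions as above (the FacetTestCase changes below suggest a CachedOrdinalsReader can be swapped in here as well):

FacetsCollector fc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), fc);

// Read ordinals from the default binary doc values facet field:
OrdinalsReader ordinalsReader = new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME);
Facets facets = new TaxonomyFacetCounts(ordinalsReader, taxoReader, config, fc);
FacetResult result = facets.getTopChildren(10, "Author");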

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -20,7 +20,7 @@ package org.apache.lucene.facet.simple;
import java.io.IOException;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.BinaryDocValues;
@ -31,11 +31,11 @@ import org.apache.lucene.util.FixedBitSet;
public class TaxonomyFacetSumFloatAssociations extends TaxonomyFacets {
private final float[] values;
public TaxonomyFacetSumFloatAssociations(TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector fc) throws IOException {
public TaxonomyFacetSumFloatAssociations(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
this(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, taxoReader, config, fc);
}
public TaxonomyFacetSumFloatAssociations(String indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector fc) throws IOException {
public TaxonomyFacetSumFloatAssociations(String indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
super(indexFieldName, taxoReader, config);
values = new float[taxoReader.getSize()];
sumValues(fc.getMatchingDocs());
@ -93,7 +93,7 @@ public class TaxonomyFacetSumFloatAssociations extends TaxonomyFacets {
}
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
throw new IllegalArgumentException("topN must be > 0 (got: " + topN + ")");
}
@ -144,6 +144,6 @@ public class TaxonomyFacetSumFloatAssociations extends TaxonomyFacets {
labelValues[i] = new LabelAndValue(child.components[cp.length], ordAndValue.value);
}
return new SimpleFacetResult(sumValue, labelValues, childCount);
return new FacetResult(sumValue, labelValues, childCount);
}
}

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -20,7 +20,7 @@ package org.apache.lucene.facet.simple;
import java.io.IOException;
import java.util.List;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.BinaryDocValues;
@ -31,11 +31,11 @@ import org.apache.lucene.util.FixedBitSet;
public class TaxonomyFacetSumIntAssociations extends TaxonomyFacets {
private final int[] values;
public TaxonomyFacetSumIntAssociations(TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector fc) throws IOException {
public TaxonomyFacetSumIntAssociations(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
this(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, taxoReader, config, fc);
}
public TaxonomyFacetSumIntAssociations(String indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector fc) throws IOException {
public TaxonomyFacetSumIntAssociations(String indexFieldName, TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector fc) throws IOException {
super(indexFieldName, taxoReader, config);
values = new int[taxoReader.getSize()];
sumValues(fc.getMatchingDocs());
@ -93,7 +93,7 @@ public class TaxonomyFacetSumIntAssociations extends TaxonomyFacets {
}
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
throw new IllegalArgumentException("topN must be > 0 (got: " + topN + ")");
}
@ -145,6 +145,6 @@ public class TaxonomyFacetSumIntAssociations extends TaxonomyFacets {
labelValues[i] = new LabelAndValue(child.components[cp.length], ordAndValue.value);
}
return new SimpleFacetResult(sumValue, labelValues, childCount);
return new FacetResult(sumValue, labelValues, childCount);
}
}
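The association variants follow the same pattern but sum per-document association values instead of counting; a sketch, where the dimension name "tags" is only a stand-in and not taken from this change:

FacetsCollector fc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), fc);

// Sum the int association payload of every matching doc, per ordinal:
Facets facets = new TaxonomyFacetSumIntAssociations(taxoReader, config, fc);
FacetResult result = facets.getTopChildren(10, "tags"); // "tags" is a hypothetical dimension

TaxonomyFacetSumFloatAssociations above is the float-valued twin of the same API.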

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -22,7 +22,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.AtomicReaderContext;
@ -46,7 +46,7 @@ public class TaxonomyFacetSumValueSource extends TaxonomyFacets {
* facet field {@link
* FacetsConfig#DEFAULT_INDEX_FIELD_NAME}. */
public TaxonomyFacetSumValueSource(TaxonomyReader taxoReader, FacetsConfig config,
SimpleFacetsCollector fc, ValueSource valueSource) throws IOException {
FacetsCollector fc, ValueSource valueSource) throws IOException {
this(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME), taxoReader, config, fc, valueSource);
}
@ -54,7 +54,7 @@ public class TaxonomyFacetSumValueSource extends TaxonomyFacets {
* {@link ValueSource}, and pulls ordinals from the
* provided {@link OrdinalsReader}. */
public TaxonomyFacetSumValueSource(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader,
FacetsConfig config, SimpleFacetsCollector fc, ValueSource valueSource) throws IOException {
FacetsConfig config, FacetsCollector fc, ValueSource valueSource) throws IOException {
super(ordinalsReader.getIndexFieldName(), taxoReader, config);
this.ordinalsReader = ordinalsReader;
values = new float[taxoReader.getSize()];
@ -140,7 +140,7 @@ public class TaxonomyFacetSumValueSource extends TaxonomyFacets {
}
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
public FacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
if (topN <= 0) {
throw new IllegalArgumentException("topN must be > 0 (got: " + topN + ")");
}
@ -203,7 +203,7 @@ public class TaxonomyFacetSumValueSource extends TaxonomyFacets {
labelValues[i] = new LabelAndValue(child.components[cp.length], ordAndValue.value);
}
return new SimpleFacetResult(sumValues, labelValues, childCount);
return new FacetResult(sumValues, labelValues, childCount);
}
/** {@link ValueSource} that returns the score for each
@ -214,7 +214,7 @@ public class TaxonomyFacetSumValueSource extends TaxonomyFacets {
public FunctionValues getValues(@SuppressWarnings("rawtypes") Map context, AtomicReaderContext readerContext) throws IOException {
final Scorer scorer = (Scorer) context.get("scorer");
if (scorer == null) {
throw new IllegalStateException("scores are missing; be sure to pass keepScores=true to SimpleFacetsCollector");
throw new IllegalStateException("scores are missing; be sure to pass keepScores=true to FacetsCollector");
}
return new DoubleDocValues(this) {
@Override

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -57,14 +57,14 @@ abstract class TaxonomyFacets extends Facets {
}
@Override
public List<SimpleFacetResult> getAllDims(int topN) throws IOException {
public List<FacetResult> getAllDims(int topN) throws IOException {
int ord = children[TaxonomyReader.ROOT_ORDINAL];
List<SimpleFacetResult> results = new ArrayList<SimpleFacetResult>();
List<FacetResult> results = new ArrayList<FacetResult>();
while (ord != TaxonomyReader.INVALID_ORDINAL) {
String dim = taxoReader.getPath(ord).components[0];
FacetsConfig.DimConfig dimConfig = config.getDimConfig(dim);
if (dimConfig.indexFieldName.equals(indexFieldName)) {
SimpleFacetResult result = getTopChildren(topN, dim);
FacetResult result = getTopChildren(topN, dim);
if (result != null) {
results.add(result);
}
@ -74,9 +74,9 @@ abstract class TaxonomyFacets extends Facets {
// Sort by highest value, tie break by value:
Collections.sort(results,
new Comparator<SimpleFacetResult>() {
new Comparator<FacetResult>() {
@Override
public int compare(SimpleFacetResult a, SimpleFacetResult b) {
public int compare(FacetResult a, FacetResult b) {
if (a.value.doubleValue() > b.value.doubleValue()) {
return -1;
} else if (b.value.doubleValue() > a.value.doubleValue()) {

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -20,7 +20,7 @@ package org.apache.lucene.facet.taxonomy;
import java.util.Arrays;
import java.util.regex.Pattern;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.FacetsConfig;
import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE;

View File

@ -21,7 +21,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -20,24 +20,16 @@ package org.apache.lucene.facet;
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.facet.simple.CachedOrdinalsReader;
import org.apache.lucene.facet.simple.DocValuesOrdinalsReader;
import org.apache.lucene.facet.simple.Facets;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.simple.OrdinalsReader;
import org.apache.lucene.facet.simple.SimpleFacetsCollector;
import org.apache.lucene.facet.simple.TaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.util.LuceneTestCase;
public abstract class FacetTestCase extends LuceneTestCase {
public Facets getTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector c) throws IOException {
public Facets getTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector c) throws IOException {
return getTaxonomyFacetCounts(taxoReader, config, c, FacetsConfig.DEFAULT_INDEX_FIELD_NAME);
}
public Facets getTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, SimpleFacetsCollector c, String indexFieldName) throws IOException {
public Facets getTaxonomyFacetCounts(TaxonomyReader taxoReader, FacetsConfig config, FacetsCollector c, String indexFieldName) throws IOException {
Facets facets;
if (random().nextBoolean()) {
facets = new FastTaxonomyFacetCounts(indexFieldName, taxoReader, config, c);

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -125,7 +125,7 @@ public class TestDrillDownQuery extends FacetTestCase {
IndexSearcher searcher = newSearcher(reader);
// test (a/1 OR a/2) AND b/1
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config);
DrillDownQuery q = new DrillDownQuery(config);
q.add("a", "1");
q.add("a", "2");
q.add("b", "1");
@ -137,7 +137,7 @@ public class TestDrillDownQuery extends FacetTestCase {
IndexSearcher searcher = newSearcher(reader);
// Making sure the query yields 25 documents with the facet "a"
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config);
DrillDownQuery q = new DrillDownQuery(config);
q.add("a");
System.out.println("q=" + q);
QueryUtils.check(q);
@ -146,13 +146,13 @@ public class TestDrillDownQuery extends FacetTestCase {
// Making sure the query yields 5 documents with the facet "b" and the
// previous (facet "a") query as a base query
SimpleDrillDownQuery q2 = new SimpleDrillDownQuery(config, q);
DrillDownQuery q2 = new DrillDownQuery(config, q);
q2.add("b");
docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
// Making sure that a query of both facet "a" and facet "b" yields 5 results
SimpleDrillDownQuery q3 = new SimpleDrillDownQuery(config);
DrillDownQuery q3 = new DrillDownQuery(config);
q3.add("a");
q3.add("b");
docs = searcher.search(q3, 100);
@ -161,7 +161,7 @@ public class TestDrillDownQuery extends FacetTestCase {
// Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
// would gather together 10 results (10%..)
Query fooQuery = new TermQuery(new Term("content", "foo"));
SimpleDrillDownQuery q4 = new SimpleDrillDownQuery(config, fooQuery);
DrillDownQuery q4 = new DrillDownQuery(config, fooQuery);
q4.add("b");
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
@ -171,12 +171,12 @@ public class TestDrillDownQuery extends FacetTestCase {
IndexSearcher searcher = newSearcher(reader);
// Create the base query to start with
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config);
DrillDownQuery q = new DrillDownQuery(config);
q.add("a");
// Making sure the query yields 5 documents with the facet "b" and the
// previous (facet "a") query as a base query
SimpleDrillDownQuery q2 = new SimpleDrillDownQuery(config, q);
DrillDownQuery q2 = new DrillDownQuery(config, q);
q2.add("b");
TopDocs docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
@ -184,7 +184,7 @@ public class TestDrillDownQuery extends FacetTestCase {
// Check that content:foo (which yields 50% results) and facet/b (which yields 20%)
// would gather together 10 results (10%..)
Query fooQuery = new TermQuery(new Term("content", "foo"));
SimpleDrillDownQuery q4 = new SimpleDrillDownQuery(config, fooQuery);
DrillDownQuery q4 = new DrillDownQuery(config, fooQuery);
q4.add("b");
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
@ -203,7 +203,7 @@ public class TestDrillDownQuery extends FacetTestCase {
}
// create a drill-down query with category "a", scores should not change
SimpleDrillDownQuery q2 = new SimpleDrillDownQuery(config, q);
DrillDownQuery q2 = new DrillDownQuery(config, q);
q2.add("a");
docs = searcher.search(q2, reader.maxDoc()); // fetch all available docs to this query
for (ScoreDoc sd : docs.scoreDocs) {
@ -215,7 +215,7 @@ public class TestDrillDownQuery extends FacetTestCase {
// verify that drill-down queries (with no base query) returns 0.0 score
IndexSearcher searcher = newSearcher(reader);
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config);
DrillDownQuery q = new DrillDownQuery(config);
q.add("a");
TopDocs docs = searcher.search(q, reader.maxDoc()); // fetch all available docs to this query
for (ScoreDoc sd : docs.scoreDocs) {
@ -225,19 +225,19 @@ public class TestDrillDownQuery extends FacetTestCase {
public void testTermNonDefault() {
String aField = config.getDimConfig("a").indexFieldName;
Term termA = SimpleDrillDownQuery.term(aField, "a");
Term termA = DrillDownQuery.term(aField, "a");
assertEquals(new Term(aField, "a"), termA);
String bField = config.getDimConfig("b").indexFieldName;
Term termB = SimpleDrillDownQuery.term(bField, "b");
Term termB = DrillDownQuery.term(bField, "b");
assertEquals(new Term(bField, "b"), termB);
}
public void testClone() throws Exception {
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config, new MatchAllDocsQuery());
DrillDownQuery q = new DrillDownQuery(config, new MatchAllDocsQuery());
q.add("a");
SimpleDrillDownQuery clone = q.clone();
DrillDownQuery clone = q.clone();
clone.add("b");
assertFalse("query wasn't cloned: source=" + q + " clone=" + clone, q.toString().equals(clone.toString()));
@ -245,7 +245,7 @@ public class TestDrillDownQuery extends FacetTestCase {
public void testNoDrillDown() throws Exception {
Query base = new MatchAllDocsQuery();
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config, base);
DrillDownQuery q = new DrillDownQuery(config, base);
Query rewrite = q.rewrite(reader).rewrite(reader);
assertSame(base, rewrite);
}
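Outside the asserts, the drill-down pattern this test exercises reduces to the following sketch (config, searcher and the indexed dims "a"/"b"/"content" are the test's own fixtures):

// Pure drill-down: docs tagged a=1 OR a=2, AND b=1:
DrillDownQuery q = new DrillDownQuery(config);
q.add("a", "1");
q.add("a", "2");
q.add("b", "1");
TopDocs docs = searcher.search(q, 100);

// Drill-down layered on an arbitrary base query:
Query fooQuery = new TermQuery(new Term("content", "foo"));
DrillDownQuery q2 = new DrillDownQuery(config, fooQuery);
q2.add("b");
docs = searcher.search(q2, 100);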

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -32,7 +32,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.simple.SimpleDrillSideways.SimpleDrillSidewaysResult;
import org.apache.lucene.facet.DrillSideways.DrillSidewaysResult;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -62,7 +62,7 @@ import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util._TestUtil;
public class TestSimpleDrillSideways extends FacetTestCase {
public class TestDrillSideways extends FacetTestCase {
public void testBasic() throws Exception {
Directory dir = newDirectory();
@ -110,14 +110,14 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleDrillSideways ds = new SimpleDrillSideways(searcher, config, taxoReader);
DrillSideways ds = new DrillSideways(searcher, config, taxoReader);
// Simple case: drill-down on a single field; in this
// case: drill-down on a single field; in this
// case the drill-sideways + drill-down counts ==
// drill-down of just the query:
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
SimpleDrillSidewaysResult r = ds.search(null, ddq, 10);
DrillSidewaysResult r = ds.search(null, ddq, 10);
assertEquals(2, r.hits.totalHits);
// Publish Date is only drill-down, and Lisa published
// one in 2012 and one in 2010:
@ -132,7 +132,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// drill-down on a single field; in this case the
// drill-sideways + drill-down counts == drill-down of
// just the query:
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
r = ds.search(null, ddq, 10);
@ -148,7 +148,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// Another simple case: drill-down on a single field
// but OR of two values
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
ddq.add("Author", "Bob");
r = ds.search(null, ddq, 10);
@ -162,7 +162,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
assertEquals("value=5 childCount=4\n Lisa (2)\n Bob (1)\n Susan (1)\n Frank (1)\n", r.facets.getTopChildren(10, "Author").toString());
// More interesting case: drill-down on two fields
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
ddq.add("Publish Date", "2010");
r = ds.search(null, ddq, 10);
@ -176,7 +176,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// Even more interesting case: drill down on two fields,
// but one of them is OR
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
// Drill down on Lisa or Bob:
ddq.add("Author", "Lisa");
@ -192,7 +192,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
assertEquals("value=2 childCount=2\n Bob (1)\n Lisa (1)\n", r.facets.getTopChildren(10, "Author").toString());
// Test drilling down on invalid field:
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("Foobar", "Baz");
r = ds.search(null, ddq, 10);
assertEquals(0, r.hits.totalHits);
@ -200,7 +200,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
assertNull(r.facets.getTopChildren(10, "Foobar"));
// Test drilling down on valid term or'd with invalid term:
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
ddq.add("Author", "Tom");
r = ds.search(null, ddq, 10);
@ -215,7 +215,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// LUCENE-4915: test drilling down on a dimension but
// NOT facet counting it:
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
ddq.add("Author", "Tom");
r = ds.search(null, ddq, 10);
@ -225,7 +225,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
assertEquals("value=2 childCount=2\n 2010 (1)\n 2012 (1)\n", r.facets.getTopChildren(10, "Publish Date").toString());
// Test main query gets null scorer:
ddq = new SimpleDrillDownQuery(config, new TermQuery(new Term("foobar", "baz")));
ddq = new DrillDownQuery(config, new TermQuery(new Term("foobar", "baz")));
ddq.add("Author", "Lisa");
r = ds.search(null, ddq, 10);
@ -273,9 +273,9 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
SimpleDrillSidewaysResult r = new SimpleDrillSideways(searcher, config, taxoReader).search(null, ddq, 10);
DrillSidewaysResult r = new DrillSideways(searcher, config, taxoReader).search(null, ddq, 10);
assertEquals(1, r.hits.totalHits);
// Publish Date is only drill-down, and Lisa published
@ -333,9 +333,9 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("dim", "a");
SimpleDrillSidewaysResult r = new SimpleDrillSideways(searcher, config, taxoReader).search(null, ddq, 10);
DrillSidewaysResult r = new DrillSideways(searcher, config, taxoReader).search(null, ddq, 10);
assertEquals(3, r.hits.totalHits);
assertEquals("value=6 childCount=4\n a (3)\n b (1)\n c (1)\n d (1)\n", r.facets.getTopChildren(10, "dim").toString());
@ -423,7 +423,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
Set<String> values = new HashSet<String>();
while (values.size() < valueCount) {
String s = _TestUtil.randomRealisticUnicodeString(random());
//String s = _TestUtil.randomSimpleString(random());
//String s = _TestUtil.randomString(random());
if (s.length() > 0) {
values.add(s);
}
@ -621,7 +621,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
baseQuery = new TermQuery(new Term("content", contentToken));
}
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config, baseQuery);
DrillDownQuery ddq = new DrillDownQuery(config, baseQuery);
for(int dim=0;dim<numDims;dim++) {
if (drillDowns[dim] != null) {
@ -658,7 +658,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// Verify docs are always collected in order. If we
// had an AssertingScorer it could catch it when
// Weight.scoresDocsOutOfOrder lies!:
new SimpleDrillSideways(s, config, tr).search(ddq,
new DrillSideways(s, config, tr).search(ddq,
new Collector() {
int lastDocID;
@ -691,7 +691,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// drill-down values, because in that case it's
// easily possible for one of the DD terms to be on
// a future docID:
new SimpleDrillSideways(s, config, tr) {
new DrillSideways(s, config, tr) {
@Override
protected boolean scoreSubDocsAtOnce() {
return true;
@ -699,19 +699,19 @@ public class TestSimpleDrillSideways extends FacetTestCase {
}.search(ddq, new AssertingSubDocsAtOnceCollector());
}
SimpleTestFacetResult expected = slowDrillSidewaysSearch(s, docs, contentToken, drillDowns, dimValues, filter);
TestFacetResult expected = slowDrillSidewaysSearch(s, docs, contentToken, drillDowns, dimValues, filter);
Sort sort = new Sort(new SortField("id", SortField.Type.STRING));
// nocommit subclass & override to use FacetsTestCase.getFacetCounts
SimpleDrillSideways ds;
DrillSideways ds;
if (doUseDV) {
ds = new SimpleDrillSideways(s, config, sortedSetDVState);
ds = new DrillSideways(s, config, sortedSetDVState);
} else {
ds = new SimpleDrillSideways(s, config, tr);
ds = new DrillSideways(s, config, tr);
}
// Retrieve all facets:
SimpleDrillSidewaysResult actual = ds.search(ddq, filter, null, numDocs, sort, true, true);
DrillSidewaysResult actual = ds.search(ddq, filter, null, numDocs, sort, true, true);
TopDocs hits = s.search(baseQuery, numDocs);
Map<String,Float> scores = new HashMap<String,Float>();
@ -765,7 +765,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
}
}
private static class SimpleTestFacetResult {
private static class TestFacetResult {
List<Doc> hits;
int[][] counts;
int[] uniqueCounts;
@ -822,7 +822,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
return topNIDs;
}
private SimpleTestFacetResult slowDrillSidewaysSearch(IndexSearcher s, List<Doc> docs,
private TestFacetResult slowDrillSidewaysSearch(IndexSearcher s, List<Doc> docs,
String contentToken, String[][] drillDowns,
String[][] dimValues, Filter onlyEven) throws Exception {
int numDims = dimValues.length;
@ -896,7 +896,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
Collections.sort(hits);
SimpleTestFacetResult res = new SimpleTestFacetResult();
TestFacetResult res = new TestFacetResult();
res.hits = hits;
res.counts = new int[numDims][];
res.uniqueCounts = new int[numDims];
@ -918,8 +918,8 @@ public class TestSimpleDrillSideways extends FacetTestCase {
return res;
}
void verifyEquals(String[][] dimValues, IndexSearcher s, SimpleTestFacetResult expected,
SimpleDrillSidewaysResult actual, Map<String,Float> scores, boolean isSortedSetDV) throws Exception {
void verifyEquals(String[][] dimValues, IndexSearcher s, TestFacetResult expected,
DrillSidewaysResult actual, Map<String,Float> scores, boolean isSortedSetDV) throws Exception {
if (VERBOSE) {
System.out.println(" verify totHits=" + expected.hits.size());
}
@ -937,7 +937,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
for(int dim=0;dim<expected.counts.length;dim++) {
int topN = random().nextBoolean() ? dimValues[dim].length : _TestUtil.nextInt(random(), 1, dimValues[dim].length);
SimpleFacetResult fr = actual.facets.getTopChildren(topN, "dim"+dim);
FacetResult fr = actual.facets.getTopChildren(topN, "dim"+dim);
if (VERBOSE) {
System.out.println(" dim" + dim + " topN=" + topN + " (vs " + dimValues[dim].length + " unique values)");
System.out.println(" actual");
@ -1014,7 +1014,7 @@ public class TestSimpleDrillSideways extends FacetTestCase {
assertEquals(setCount, actualValues.size());
}
// nocommit if we add this to SimpleFR then re-enable this:
// nocommit if we add this to FR then re-enable this:
// assertEquals("dim=" + dim, expected.uniqueCounts[dim], fr.getNumValidDescendants());
}
}
@ -1030,11 +1030,11 @@ public class TestSimpleDrillSideways extends FacetTestCase {
// Count "Author"
FacetsConfig config = new FacetsConfig();
SimpleDrillSideways ds = new SimpleDrillSideways(searcher, config, taxoReader);
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
DrillSideways ds = new DrillSideways(searcher, config, taxoReader);
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");
SimpleDrillSidewaysResult r = ds.search(ddq, 10); // this used to fail on IllegalArgEx
DrillSidewaysResult r = ds.search(ddq, 10); // this used to fail on IllegalArgEx
assertEquals(0, r.hits.totalHits);
r = ds.search(ddq, null, null, 10, new Sort(new SortField("foo", SortField.Type.INT)), false, false); // this used to fail on IllegalArgEx
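Condensed, the drill-sideways flow used throughout this test looks like the sketch below (dims and values as in testBasic; searcher, config and taxoReader are the test fixtures):

DrillSideways ds = new DrillSideways(searcher, config, taxoReader);

DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("Author", "Lisa");

// r.hits honors the drill-down, while r.facets counts the drilled
// dimension as if its own constraint were dropped (the "sideways" part):
DrillSidewaysResult r = ds.search(null, ddq, 10);
int totalHits = r.hits.totalHits;
FacetResult pubDates = r.facets.getTopChildren(10, "Publish Date");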

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -95,7 +95,7 @@ public class TestMultipleIndexFields extends FacetTestCase {
// prepare searcher to search against
IndexSearcher searcher = newSearcher(ir);
SimpleFacetsCollector sfc = performSearch(tr, ir, searcher);
FacetsCollector sfc = performSearch(tr, ir, searcher);
// Obtain facets results and hand-test them
assertCorrectResults(getTaxonomyFacetCounts(tr, config, sfc));
@ -129,7 +129,7 @@ public class TestMultipleIndexFields extends FacetTestCase {
// prepare searcher to search against
IndexSearcher searcher = newSearcher(ir);
SimpleFacetsCollector sfc = performSearch(tr, ir, searcher);
FacetsCollector sfc = performSearch(tr, ir, searcher);
Map<String,Facets> facetsMap = new HashMap<String,Facets>();
facetsMap.put("Author", getTaxonomyFacetCounts(tr, config, sfc, "$author"));
@ -169,7 +169,7 @@ public class TestMultipleIndexFields extends FacetTestCase {
// prepare searcher to search against
IndexSearcher searcher = newSearcher(ir);
SimpleFacetsCollector sfc = performSearch(tr, ir, searcher);
FacetsCollector sfc = performSearch(tr, ir, searcher);
Map<String,Facets> facetsMap = new HashMap<String,Facets>();
Facets facets2 = getTaxonomyFacetCounts(tr, config, sfc, "$music");
@ -222,7 +222,7 @@ public class TestMultipleIndexFields extends FacetTestCase {
// prepare searcher to search against
IndexSearcher searcher = newSearcher(ir);
SimpleFacetsCollector sfc = performSearch(tr, ir, searcher);
FacetsCollector sfc = performSearch(tr, ir, searcher);
Map<String,Facets> facetsMap = new HashMap<String,Facets>();
facetsMap.put("Band", getTaxonomyFacetCounts(tr, config, sfc, "$bands"));
@ -264,7 +264,7 @@ public class TestMultipleIndexFields extends FacetTestCase {
// prepare searcher to search against
IndexSearcher searcher = newSearcher(ir);
SimpleFacetsCollector sfc = performSearch(tr, ir, searcher);
FacetsCollector sfc = performSearch(tr, ir, searcher);
Map<String,Facets> facetsMap = new HashMap<String,Facets>();
Facets facets2 = getTaxonomyFacetCounts(tr, config, sfc, "$music");
@ -289,9 +289,9 @@ public class TestMultipleIndexFields extends FacetTestCase {
assertEquals("value=3 childCount=3\n Mark Twain (1)\n Stephen King (1)\n Kurt Vonnegut (1)\n", facets.getTopChildren(10, "Author").toString());
}
private SimpleFacetsCollector performSearch(TaxonomyReader tr, IndexReader ir,
private FacetsCollector performSearch(TaxonomyReader tr, IndexReader ir,
IndexSearcher searcher) throws IOException {
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
Facets.search(searcher, new MatchAllDocsQuery(), 10, sfc);
return sfc;
}

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -34,7 +34,7 @@ import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.LongField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.simple.SimpleDrillSideways.SimpleDrillSidewaysResult;
import org.apache.lucene.facet.DrillSideways.DrillSidewaysResult;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -69,7 +69,7 @@ public class TestRangeFacets extends FacetTestCase {
IndexReader r = w.getReader();
w.close();
SimpleFacetsCollector fc = new SimpleFacetsCollector();
FacetsCollector fc = new FacetsCollector();
IndexSearcher s = newSearcher(r);
s.search(new MatchAllDocsQuery(), fc);
@ -80,7 +80,7 @@ public class TestRangeFacets extends FacetTestCase {
new LongRange("90 or above", 90L, true, 100L, false),
new LongRange("over 1000", 1000L, false, Long.MAX_VALUE, true));
SimpleFacetResult result = facets.getTopChildren(10, "field");
FacetResult result = facets.getTopChildren(10, "field");
assertEquals("value=101 childCount=5\n less than 10 (10)\n less than or equal to 10 (11)\n over 90 (9)\n 90 or above (10)\n over 1000 (1)\n",
result.toString());
@ -119,15 +119,15 @@ public class TestRangeFacets extends FacetTestCase {
IndexSearcher s = newSearcher(r);
SimpleDrillSideways ds = new SimpleDrillSideways(s, config, tr) {
DrillSideways ds = new DrillSideways(s, config, tr) {
@Override
protected Facets buildFacetsResult(SimpleFacetsCollector drillDowns, SimpleFacetsCollector[] drillSideways, String[] drillSidewaysDims) throws IOException {
protected Facets buildFacetsResult(FacetsCollector drillDowns, FacetsCollector[] drillSideways, String[] drillSidewaysDims) throws IOException {
// nocommit this is awkward... can we improve?
// nocommit is drillDowns allowed to be null?
// should it?
SimpleFacetsCollector dimFC = drillDowns;
SimpleFacetsCollector fieldFC = drillDowns;
FacetsCollector dimFC = drillDowns;
FacetsCollector fieldFC = drillDowns;
if (drillSideways != null) {
for(int i=0;i<drillSideways.length;i++) {
String dim = drillSidewaysDims[i];
@ -158,8 +158,8 @@ public class TestRangeFacets extends FacetTestCase {
};
// First search, no drill downs:
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
SimpleDrillSidewaysResult dsr = ds.search(null, ddq, 10);
DrillDownQuery ddq = new DrillDownQuery(config);
DrillSidewaysResult dsr = ds.search(null, ddq, 10);
assertEquals(100, dsr.hits.totalHits);
assertEquals("value=100 childCount=2\n b (75)\n a (25)\n", dsr.facets.getTopChildren(10, "dim").toString());
@ -167,7 +167,7 @@ public class TestRangeFacets extends FacetTestCase {
dsr.facets.getTopChildren(10, "field").toString());
// Second search, drill down on dim=b:
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("dim", "b");
dsr = ds.search(null, ddq, 10);
@ -177,7 +177,7 @@ public class TestRangeFacets extends FacetTestCase {
dsr.facets.getTopChildren(10, "field").toString());
// Third search, drill down on "less than or equal to 10":
ddq = new SimpleDrillDownQuery(config);
ddq = new DrillDownQuery(config);
ddq.add("field", NumericRangeQuery.newLongRange("field", 0L, 10L, true, true));
dsr = ds.search(null, ddq, 10);
@ -201,7 +201,7 @@ public class TestRangeFacets extends FacetTestCase {
IndexReader r = w.getReader();
SimpleFacetsCollector fc = new SimpleFacetsCollector();
FacetsCollector fc = new FacetsCollector();
IndexSearcher s = newSearcher(r);
s.search(new MatchAllDocsQuery(), fc);
@ -231,7 +231,7 @@ public class TestRangeFacets extends FacetTestCase {
IndexReader r = w.getReader();
SimpleFacetsCollector fc = new SimpleFacetsCollector();
FacetsCollector fc = new FacetsCollector();
IndexSearcher s = newSearcher(r);
s.search(new MatchAllDocsQuery(), fc);
@ -308,10 +308,10 @@ public class TestRangeFacets extends FacetTestCase {
}
}
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
s.search(new MatchAllDocsQuery(), sfc);
Facets facets = new RangeFacetCounts("field", sfc, ranges);
SimpleFacetResult result = facets.getTopChildren(10, "field");
FacetResult result = facets.getTopChildren(10, "field");
assertEquals(numRange, result.labelValues.length);
for(int rangeID=0;rangeID<numRange;rangeID++) {
if (VERBOSE) {
@ -324,7 +324,7 @@ public class TestRangeFacets extends FacetTestCase {
LongRange range = ranges[rangeID];
// Test drill-down:
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("field", NumericRangeQuery.newLongRange("field", range.min, range.max, range.minInclusive, range.maxInclusive));
assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
}
@ -392,10 +392,10 @@ public class TestRangeFacets extends FacetTestCase {
}
}
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
s.search(new MatchAllDocsQuery(), sfc);
Facets facets = new RangeFacetCounts("field", sfc, ranges);
SimpleFacetResult result = facets.getTopChildren(10, "field");
FacetResult result = facets.getTopChildren(10, "field");
assertEquals(numRange, result.labelValues.length);
for(int rangeID=0;rangeID<numRange;rangeID++) {
if (VERBOSE) {
@ -408,7 +408,7 @@ public class TestRangeFacets extends FacetTestCase {
FloatRange range = ranges[rangeID];
// Test drill-down:
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("field", NumericRangeQuery.newFloatRange("field", range.min, range.max, range.minInclusive, range.maxInclusive));
assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
}
@ -476,10 +476,10 @@ public class TestRangeFacets extends FacetTestCase {
}
}
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
s.search(new MatchAllDocsQuery(), sfc);
Facets facets = new RangeFacetCounts("field", sfc, ranges);
SimpleFacetResult result = facets.getTopChildren(10, "field");
FacetResult result = facets.getTopChildren(10, "field");
assertEquals(numRange, result.labelValues.length);
for(int rangeID=0;rangeID<numRange;rangeID++) {
if (VERBOSE) {
@ -492,7 +492,7 @@ public class TestRangeFacets extends FacetTestCase {
DoubleRange range = ranges[rangeID];
// Test drill-down:
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(config);
DrillDownQuery ddq = new DrillDownQuery(config);
ddq.add("field", NumericRangeQuery.newDoubleRange("field", range.min, range.max, range.minInclusive, range.maxInclusive));
assertEquals(expectedCounts[rangeID], s.search(ddq, 10).totalHits);
}
@ -521,7 +521,7 @@ public class TestRangeFacets extends FacetTestCase {
IndexReader r = w.getReader();
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
IndexSearcher s = newSearcher(r);
s.search(new MatchAllDocsQuery(), sfc);

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -27,7 +27,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.simple.SearcherTaxonomyManager.SearcherAndTaxonomy;
import org.apache.lucene.facet.SearcherTaxonomyManager.SearcherAndTaxonomy;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.IndexWriter;
@ -129,10 +129,10 @@ public class TestSearcherTaxonomyManager extends FacetTestCase {
//System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
int topN = _TestUtil.nextInt(random(), 1, 20);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
pair.searcher.search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(pair.taxonomyReader, config, sfc);
SimpleFacetResult result = facets.getTopChildren(10, "field");
FacetResult result = facets.getTopChildren(10, "field");
if (pair.searcher.getIndexReader().numDocs() > 0) {
//System.out.println(pair.taxonomyReader.getSize());
assertTrue(result.childCount > 0);
@ -140,7 +140,7 @@ public class TestSearcherTaxonomyManager extends FacetTestCase {
}
//if (VERBOSE) {
//System.out.println("TEST: facets=" + FacetTestUtils.toSimpleString(results.get(0)));
//System.out.println("TEST: facets=" + FacetTestUtils.toString(results.get(0)));
//}
} finally {
mgr.release(pair);

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -63,7 +63,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(searcher.getIndexReader());
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
@ -73,7 +73,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
assertEquals("value=1 childCount=1\n baz (1)\n", facets.getTopChildren(10, "b").toString());
// DrillDown:
SimpleDrillDownQuery q = new SimpleDrillDownQuery(config);
DrillDownQuery q = new DrillDownQuery(config);
q.add("a", "foo");
q.add("b", "baz");
TopDocs hits = searcher.search(q, 1);
@ -108,7 +108,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
IndexSearcher searcher = newSearcher(writer.getReader());
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
@ -164,12 +164,12 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(searcher.getIndexReader());
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
SortedSetDocValuesFacetCounts facets = new SortedSetDocValuesFacetCounts(state, c);
// Ask for top 10 labels for any dims that have counts:
List<SimpleFacetResult> results = facets.getAllDims(10);
List<FacetResult> results = facets.getAllDims(10);
assertEquals(3, results.size());
assertEquals("value=3 childCount=3\n foo1 (1)\n foo2 (1)\n foo3 (1)\n", results.get(0).toString());
@ -210,7 +210,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Per-top-reader state:
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(searcher.getIndexReader());
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = new SortedSetDocValuesFacetCounts(state, c);
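The sorted-set doc values path needs no taxonomy index at all; the pattern repeated in this test is roughly (the state must be rebuilt per top-level reader, as the comments above note):

// Per-top-reader state:
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(searcher.getIndexReader());

FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);

Facets facets = new SortedSetDocValuesFacetCounts(state, c);
FacetResult result = facets.getTopChildren(10, "a");
List<FacetResult> allDims = facets.getAllDims(10);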

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -101,7 +101,7 @@ public class TestTaxonomyFacetAssociations extends FacetTestCase {
public void testIntSumAssociation() throws Exception {
SimpleFacetsCollector fc = new SimpleFacetsCollector();
FacetsCollector fc = new FacetsCollector();
IndexSearcher searcher = newSearcher(reader);
searcher.search(new MatchAllDocsQuery(), fc);
@ -113,7 +113,7 @@ public class TestTaxonomyFacetAssociations extends FacetTestCase {
}
public void testFloatSumAssociation() throws Exception {
SimpleFacetsCollector fc = new SimpleFacetsCollector();
FacetsCollector fc = new FacetsCollector();
IndexSearcher searcher = newSearcher(reader);
searcher.search(new MatchAllDocsQuery(), fc);
@ -127,7 +127,7 @@ public class TestTaxonomyFacetAssociations extends FacetTestCase {
/** Make sure we can test both int and float assocs in one
* index, as long as we send each to a different field. */
public void testIntAndFloatAssocation() throws Exception {
SimpleFacetsCollector fc = new SimpleFacetsCollector();
FacetsCollector fc = new FacetsCollector();
IndexSearcher searcher = newSearcher(reader);
searcher.search(new MatchAllDocsQuery(), fc);
@ -142,7 +142,7 @@ public class TestTaxonomyFacetAssociations extends FacetTestCase {
}
public void testWrongIndexFieldName() throws Exception {
SimpleFacetsCollector fc = new SimpleFacetsCollector();
FacetsCollector fc = new FacetsCollector();
IndexSearcher searcher = newSearcher(reader);
searcher.search(new MatchAllDocsQuery(), fc);

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -97,7 +97,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
// Aggregate the facet counts:
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -112,9 +112,9 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
assertEquals("value=5 childCount=4\n Lisa (2)\n Bob (1)\n Susan (1)\n Frank (1)\n", facets.getTopChildren(10, "Author").toString());
// Now user drills down on Publish Date/2010:
SimpleDrillDownQuery q2 = new SimpleDrillDownQuery(config);
DrillDownQuery q2 = new DrillDownQuery(config);
q2.add("Publish Date", "2010");
c = new SimpleFacetsCollector();
c = new FacetsCollector();
searcher.search(q2, c);
facets = new FastTaxonomyFacetCounts(taxoReader, config, c);
assertEquals("value=2 childCount=2\n Bob (1)\n Lisa (1)\n", facets.getTopChildren(10, "Author").toString());
@ -176,13 +176,13 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, new FacetsConfig(), c);
// Ask for top 10 labels for any dims that have counts:
List<SimpleFacetResult> results = facets.getAllDims(10);
List<FacetResult> results = facets.getAllDims(10);
assertEquals(3, results.size());
assertEquals("value=3 childCount=3\n foo1 (1)\n foo2 (1)\n foo3 (1)\n", results.get(0).toString());
@ -214,7 +214,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
// Uses default $facets field:
@ -230,7 +230,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
}
// Ask for top 10 labels for any dims that have counts:
List<SimpleFacetResult> results = facets.getAllDims(10);
List<FacetResult> results = facets.getAllDims(10);
assertTrue(results.isEmpty());
try {
@ -300,7 +300,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
// Aggregate the facet counts:
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -308,7 +308,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// wrap collecting the "normal" hits and also facets:
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
SimpleFacetResult result = facets.getTopChildren(10, "a");
FacetResult result = facets.getTopChildren(10, "a");
assertEquals(1, result.labelValues.length);
assertEquals(1, result.labelValues[0].value.intValue());
@ -336,14 +336,14 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
assertEquals(1, facets.getSpecificValue("dim", "test\u001Fone"));
assertEquals(1, facets.getSpecificValue("dim", "test\u001Etwo"));
SimpleFacetResult result = facets.getTopChildren(10, "dim");
FacetResult result = facets.getTopChildren(10, "dim");
assertEquals("value=-1 childCount=2\n test\u001Fone (1)\n test\u001Etwo (1)\n", result.toString());
IOUtils.close(writer, taxoWriter, searcher.getIndexReader(), taxoReader, dir, taxoDir);
}
@ -377,7 +377,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// NRT open
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
@ -416,7 +416,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
// Aggregate the facet counts:
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -425,7 +425,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
searcher.search(new MatchAllDocsQuery(), c);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, c);
SimpleFacetResult result = facets.getTopChildren(Integer.MAX_VALUE, "dim");
FacetResult result = facets.getTopChildren(Integer.MAX_VALUE, "dim");
assertEquals(numLabels, result.labelValues.length);
Set<String> allLabels = new HashSet<String>();
for (LabelAndValue labelValue : result.labelValues) {
@ -500,7 +500,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets1 = getTaxonomyFacetCounts(taxoReader, config, sfc);
Facets facets2 = getTaxonomyFacetCounts(taxoReader, config, sfc, "$b");
@ -527,10 +527,10 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
for (SimpleFacetResult result : facets.getAllDims(10)) {
for (FacetResult result : facets.getAllDims(10)) {
assertEquals(r.numDocs(), result.value.intValue());
}
@ -554,12 +554,12 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
final SimpleFacetsCollector sfc = new SimpleFacetsCollector();
final FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
List<SimpleFacetResult> res1 = facets.getAllDims(10);
List<SimpleFacetResult> res2 = facets.getAllDims(10);
List<FacetResult> res1 = facets.getAllDims(10);
List<FacetResult> res2 = facets.getAllDims(10);
assertEquals("calling getFacetResults twice should return the .equals()=true result", res1, res2);
IOUtils.close(taxoWriter, iw, taxoReader, taxoDir, r, indexDir);
@ -582,7 +582,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
@ -634,10 +634,10 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
// search for "f:a", only segments 1 and 3 should match results
Query q = new TermQuery(new Term("f", "a"));
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
indexSearcher.search(q, sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
SimpleFacetResult result = facets.getTopChildren(10, "A");
FacetResult result = facets.getTopChildren(10, "A");
assertEquals("wrong number of children", 2, result.labelValues.length);
for (LabelAndValue labelValue : result.labelValues) {
assertEquals("wrong weight for child " + labelValue.label, 2, labelValue.value.intValue());

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -271,11 +271,11 @@ public class TestTaxonomyFacetCounts2 extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
IndexSearcher searcher = newSearcher(indexReader);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
TermQuery q = new TermQuery(A);
searcher.search(q, sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, getConfig(), sfc);
SimpleFacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_A, CP_A);
FacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_A, CP_A);
assertEquals(-1, result.value.intValue());
for(LabelAndValue labelValue : result.labelValues) {
assertEquals(termExpectedCounts.get(CP_A + "/" + labelValue.label), labelValue.value);
@ -295,12 +295,12 @@ public class TestTaxonomyFacetCounts2 extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
IndexSearcher searcher = newSearcher(indexReader);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, getConfig(), sfc);
SimpleFacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_A, CP_A);
FacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_A, CP_A);
assertEquals(-1, result.value.intValue());
int prevValue = Integer.MAX_VALUE;
for(LabelAndValue labelValue : result.labelValues) {
@ -327,12 +327,12 @@ public class TestTaxonomyFacetCounts2 extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
IndexSearcher searcher = newSearcher(indexReader);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, getConfig(), sfc);
SimpleFacetResult result = facets.getTopChildren(Integer.MAX_VALUE, CP_A);
FacetResult result = facets.getTopChildren(Integer.MAX_VALUE, CP_A);
assertEquals(-1, result.value.intValue());
for(LabelAndValue labelValue : result.labelValues) {
assertEquals(allExpectedCounts.get(CP_A + "/" + labelValue.label), labelValue.value);
@ -352,12 +352,12 @@ public class TestTaxonomyFacetCounts2 extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
IndexSearcher searcher = newSearcher(indexReader);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, getConfig(), sfc);
SimpleFacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_C, CP_C);
FacetResult result = facets.getTopChildren(NUM_CHILDREN_CP_C, CP_C);
assertEquals(allExpectedCounts.get(CP_C), result.value);
for(LabelAndValue labelValue : result.labelValues) {
assertEquals(allExpectedCounts.get(CP_C + "/" + labelValue.label), labelValue.value);

View File

@ -1,4 +1,4 @@
package org.apache.lucene.facet.simple;
package org.apache.lucene.facet;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -118,7 +118,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
taxoWriter.close();
// Aggregate the facet counts:
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
@ -183,13 +183,13 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
taxoWriter.close();
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, new IntFieldSource("num"));
// Ask for top 10 labels for any dims that have counts:
List<SimpleFacetResult> results = facets.getAllDims(10);
List<FacetResult> results = facets.getAllDims(10);
assertEquals(3, results.size());
assertEquals("value=60.0 childCount=3\n foo3 (30.0)\n foo2 (20.0)\n foo1 (10.0)\n", results.get(0).toString());
@ -226,13 +226,13 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
taxoWriter.close();
SimpleFacetsCollector c = new SimpleFacetsCollector();
FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c);
TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, config, c, new IntFieldSource("num"));
// Ask for top 10 labels for any dims that have counts:
List<SimpleFacetResult> results = facets.getAllDims(10);
List<FacetResult> results = facets.getAllDims(10);
assertTrue(results.isEmpty());
try {
@ -273,7 +273,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector fc = new SimpleFacetsCollector(true);
FacetsCollector fc = new FacetsCollector(true);
TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
ConstantScoreQuery csq = new ConstantScoreQuery(new MatchAllDocsQuery());
csq.setBoost(2.0f);
@ -306,7 +306,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, new LongFieldSource("price"));
assertEquals("value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString());
@ -354,7 +354,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
@Override public String description() { return "score()"; }
};
SimpleFacetsCollector sfc = new SimpleFacetsCollector(true);
FacetsCollector sfc = new FacetsCollector(true);
TopScoreDocCollector tsdc = TopScoreDocCollector.create(10, true);
// score documents by their 'price' field - makes asserting the correct counts for the categories easier
Query q = new FunctionQuery(new LongFieldSource("price"));
@ -387,7 +387,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
ValueSource valueSource = new LongFieldSource("price");
SimpleFacetsCollector sfc = new SimpleFacetsCollector();
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, valueSource);
@ -416,7 +416,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
SimpleFacetsCollector sfc = new SimpleFacetsCollector(true);
FacetsCollector sfc = new FacetsCollector(true);
TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
newSearcher(r).search(new MatchAllDocsQuery(), MultiCollector.wrap(sfc, topDocs));
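Passing true to FacetsCollector keeps scores, which a score-based ValueSource can then aggregate; a minimal sketch of collecting hits and facets in one search pass, again assuming searcher is in scope:

  // Keep scores so score() can be summed per category later.
  FacetsCollector fc = new FacetsCollector(true);
  TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
  // One pass feeds both the hit collector and the facets collector.
  searcher.search(new MatchAllDocsQuery(), MultiCollector.wrap(fc, topDocs));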

View File

@ -8,9 +8,9 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.simple.FacetField;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
import org.apache.lucene.facet.taxonomy.writercache.cl2o.Cl2oTaxonomyWriterCache;

View File

@ -10,10 +10,10 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.simple.FacetField;
import org.apache.lucene.facet.simple.FacetsConfig;
import org.apache.lucene.facet.simple.SimpleDrillDownQuery;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
@ -459,7 +459,7 @@ public class TestDirectoryTaxonomyWriter extends FacetTestCase {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
SimpleDrillDownQuery ddq = new SimpleDrillDownQuery(new FacetsConfig());
DrillDownQuery ddq = new DrillDownQuery(new FacetsConfig());
ddq.add("dim", bigs);
assertEquals(1, searcher.search(ddq, 10).totalHits);

View File

@ -26,18 +26,19 @@ import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.search.DrillDownQuery;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.DrillDownQuery;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SnapshotDeletionPolicy;
@ -63,11 +64,14 @@ public class IndexAndTaxonomyReplicationClientTest extends ReplicatorTestCase {
private final Directory indexDir, taxoDir;
private DirectoryReader indexReader;
private DirectoryTaxonomyReader taxoReader;
private FacetsConfig config;
private long lastIndexGeneration = -1;
public IndexAndTaxonomyReadyCallback(Directory indexDir, Directory taxoDir) throws IOException {
this.indexDir = indexDir;
this.taxoDir = taxoDir;
config = new FacetsConfig();
config.setHierarchical("A", true);
if (DirectoryReader.indexExists(indexDir)) {
indexReader = DirectoryReader.open(indexDir);
lastIndexGeneration = indexReader.getIndexCommit().getGeneration();
@ -102,14 +106,14 @@ public class IndexAndTaxonomyReplicationClientTest extends ReplicatorTestCase {
// verify faceted search
int id = Integer.parseInt(indexReader.getIndexCommit().getUserData().get(VERSION_ID), 16);
FacetLabel cp = new FacetLabel("A", Integer.toString(id, 16));
IndexSearcher searcher = new IndexSearcher(indexReader);
FacetsCollector fc = FacetsCollector.create(new FacetSearchParams(new CountFacetRequest(cp, 10)), indexReader, taxoReader);
FacetsCollector fc = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), fc);
assertEquals(1, (int) fc.getFacetResults().get(0).getFacetResultNode().value);
Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
assertEquals(1, facets.getSpecificValue("A", Integer.toString(id, 16)).intValue());
DrillDownQuery drillDown = new DrillDownQuery(FacetIndexingParams.DEFAULT);
drillDown.add(cp);
DrillDownQuery drillDown = new DrillDownQuery(config);
drillDown.add("A", Integer.toString(id, 16));
TopDocs docs = searcher.search(drillDown, 10);
assertEquals(1, docs.totalHits);
}
@ -130,6 +134,7 @@ public class IndexAndTaxonomyReplicationClientTest extends ReplicatorTestCase {
private ReplicationHandler handler;
private IndexWriter publishIndexWriter;
private SnapshotDirectoryTaxonomyWriter publishTaxoWriter;
private FacetsConfig config;
private IndexAndTaxonomyReadyCallback callback;
private File clientWorkDir;
@ -175,11 +180,10 @@ public class IndexAndTaxonomyReplicationClientTest extends ReplicatorTestCase {
return new IndexAndTaxonomyRevision(publishIndexWriter, publishTaxoWriter);
}
private Document newDocument(TaxonomyWriter taxoWriter, int id) throws IOException {
private IndexDocument newDocument(TaxonomyWriter taxoWriter, int id) throws IOException {
Document doc = new Document();
FacetFields facetFields = new FacetFields(taxoWriter);
facetFields.addFields(doc, Collections.singleton(new FacetLabel("A", Integer.toString(id, 16))));
return doc;
doc.add(new FacetField("A", Integer.toString(id, 16)));
return config.build(doc);
}
@Override
@ -201,6 +205,8 @@ public class IndexAndTaxonomyReplicationClientTest extends ReplicatorTestCase {
conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
publishIndexWriter = new IndexWriter(publishIndexDir, conf);
publishTaxoWriter = new SnapshotDirectoryTaxonomyWriter(publishTaxoDir);
config = new FacetsConfig(publishTaxoWriter);
config.setHierarchical("A", true);
}
@After
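On the indexing side the patch replaces FacetFields/FacetLabel with FacetField plus FacetsConfig.build; a hedged sketch of the resulting document construction, following the constructor and build signature used on this branch (taxoWriter and id are assumed to be in scope):

  FacetsConfig config = new FacetsConfig(taxoWriter);  // taxoWriter: an open TaxonomyWriter
  config.setHierarchical("A", true);                    // "A" carries multi-level paths

  Document doc = new Document();
  doc.add(new FacetField("A", Integer.toString(id, 16)));
  // build() translates the FacetField into drill-down terms plus taxonomy ordinals.
  IndexDocument toIndex = config.build(doc);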

View File

@ -21,13 +21,15 @@ import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Map;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@ -41,11 +43,11 @@ import org.junit.Test;
public class IndexAndTaxonomyRevisionTest extends ReplicatorTestCase {
private Document newDocument(TaxonomyWriter taxoWriter) throws IOException {
private IndexDocument newDocument(TaxonomyWriter taxoWriter) throws IOException {
FacetsConfig config = new FacetsConfig(taxoWriter);
Document doc = new Document();
FacetFields ff = new FacetFields(taxoWriter);
ff.addFields(doc, Collections.singleton(new FacetLabel("A")));
return doc;
doc.add(new FacetField("A", "1"));
return config.build(doc);
}
@Test