eclipse rename

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5339@1542065 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2013-11-14 20:38:42 +00:00
parent 966de3eaf5
commit b5d3afa239
110 changed files with 871 additions and 875 deletions

View File

@ -20,7 +20,7 @@ package org.apache.lucene.benchmark.byTask.feeds;
import java.io.IOException;
import java.util.List;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/**
* Source items for facets.
@ -34,7 +34,7 @@ public abstract class FacetSource extends ContentItemsSource {
* account for multi-threading, as multiple threads can call this method
* simultaneously.
*/
public abstract void getNextFacets(List<CategoryPath> facets) throws NoMoreDataException, IOException;
public abstract void getNextFacets(List<FacetLabel> facets) throws NoMoreDataException, IOException;
@Override
public void resetInputs() throws IOException {

View File

@ -22,7 +22,7 @@ import java.util.List;
import java.util.Random;
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/**
* Simple implementation of a random facet source
@ -47,7 +47,7 @@ public class RandomFacetSource extends FacetSource {
private int maxValue = maxDocFacets * maxFacetDepth;
@Override
public void getNextFacets(List<CategoryPath> facets) throws NoMoreDataException, IOException {
public void getNextFacets(List<FacetLabel> facets) throws NoMoreDataException, IOException {
facets.clear();
int numFacets = 1 + random.nextInt(maxDocFacets); // at least one facet to each doc
for (int i = 0; i < numFacets; i++) {
@ -57,7 +57,7 @@ public class RandomFacetSource extends FacetSource {
components[k] = Integer.toString(random.nextInt(maxValue));
addItem();
}
CategoryPath cp = new CategoryPath(components);
FacetLabel cp = new FacetLabel(components);
facets.add(cp);
addBytes(cp.toString().length()); // very rough approximation
}

View File

@ -23,7 +23,7 @@ import java.util.List;
import org.apache.lucene.benchmark.byTask.PerfRunData;
import org.apache.lucene.benchmark.byTask.feeds.FacetSource;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/**
* Add a faceted document.
@ -44,7 +44,7 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
*/
public class AddFacetedDocTask extends AddDocTask {
private final List<CategoryPath> facets = new ArrayList<CategoryPath>();
private final List<FacetLabel> facets = new ArrayList<FacetLabel>();
private FacetFields facetFields;
public AddFacetedDocTask(PerfRunData runData) {

View File

@ -16,7 +16,7 @@ import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -52,17 +52,17 @@ public class AssociationsFacetsExample {
* Categories per document, {@link #ASSOCIATIONS} hold the association value
* for each category.
*/
public static CategoryPath[][] CATEGORIES = {
public static FacetLabel[][] CATEGORIES = {
// Doc #1
{ new CategoryPath("tags", "lucene") ,
new CategoryPath("genre", "computing")
{ new FacetLabel("tags", "lucene") ,
new FacetLabel("genre", "computing")
},
// Doc #2
{ new CategoryPath("tags", "lucene"),
new CategoryPath("tags", "solr"),
new CategoryPath("genre", "computing"),
new CategoryPath("genre", "software")
{ new FacetLabel("tags", "lucene"),
new FacetLabel("tags", "solr"),
new FacetLabel("genre", "computing"),
new FacetLabel("genre", "software")
}
};
@ -126,8 +126,8 @@ public class AssociationsFacetsExample {
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
CategoryPath tags = new CategoryPath("tags");
CategoryPath genre = new CategoryPath("genre");
FacetLabel tags = new FacetLabel("tags");
FacetLabel genre = new FacetLabel("genre");
FacetSearchParams fsp = new FacetSearchParams(new SumIntAssociationFacetRequest(tags, 10),
new SumFloatAssociationFacetRequest(genre, 10));
FacetsCollector fc = FacetsCollector.create(fsp, indexReader, taxoReader);

View File

@ -18,7 +18,7 @@ import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.search.SumValueSourceFacetRequest;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -61,7 +61,7 @@ public class ExpressionAggregationFacetsExample {
Document doc = new Document();
doc.add(new TextField("c", text, Store.NO));
doc.add(new NumericDocValuesField("popularity", popularity));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath(category, '/')));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel(category, '/')));
indexWriter.addDocument(doc);
}
@ -97,7 +97,7 @@ public class ExpressionAggregationFacetsExample {
bindings.add(new SortField("popularity", SortField.Type.LONG)); // the value of the 'popularity' field
FacetSearchParams fsp = new FacetSearchParams(
new SumValueSourceFacetRequest(new CategoryPath("A"), 10, expr.getValueSource(bindings), true));
new SumValueSourceFacetRequest(new FacetLabel("A"), 10, expr.getValueSource(bindings), true));
// Aggregates the facet values
FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);

View File

@ -16,7 +16,7 @@ import org.apache.lucene.facet.params.PerDimensionIndexingParams;
import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -55,18 +55,18 @@ public class MultiCategoryListsFacetsExample {
/** Creates a new instance and populates the category list params mapping. */
public MultiCategoryListsFacetsExample() {
// index all Author facets in one category list and all Publish Date in another.
Map<CategoryPath,CategoryListParams> categoryListParams = new HashMap<CategoryPath,CategoryListParams>();
categoryListParams.put(new CategoryPath("Author"), new CategoryListParams("author"));
categoryListParams.put(new CategoryPath("Publish Date"), new CategoryListParams("pubdate"));
Map<FacetLabel,CategoryListParams> categoryListParams = new HashMap<FacetLabel,CategoryListParams>();
categoryListParams.put(new FacetLabel("Author"), new CategoryListParams("author"));
categoryListParams.put(new FacetLabel("Publish Date"), new CategoryListParams("pubdate"));
indexingParams = new PerDimensionIndexingParams(categoryListParams);
}
private void add(IndexWriter indexWriter, FacetFields facetFields, String ... categoryPaths) throws IOException {
Document doc = new Document();
List<CategoryPath> paths = new ArrayList<CategoryPath>();
List<FacetLabel> paths = new ArrayList<FacetLabel>();
for (String categoryPath : categoryPaths) {
paths.add(new CategoryPath(categoryPath, '/'));
paths.add(new FacetLabel(categoryPath, '/'));
}
facetFields.addFields(doc, paths);
indexWriter.addDocument(doc);
@ -101,8 +101,8 @@ public class MultiCategoryListsFacetsExample {
// Count both "Publish Date" and "Author" dimensions
FacetSearchParams fsp = new FacetSearchParams(indexingParams,
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
// Aggregates the facet counts
FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);

View File

@ -12,7 +12,7 @@ import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.search.DrillDownQuery;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -53,9 +53,9 @@ public class SimpleFacetsExample {
private void add(IndexWriter indexWriter, FacetFields facetFields, String ... categoryPaths) throws IOException {
Document doc = new Document();
List<CategoryPath> paths = new ArrayList<CategoryPath>();
List<FacetLabel> paths = new ArrayList<FacetLabel>();
for (String categoryPath : categoryPaths) {
paths.add(new CategoryPath(categoryPath, '/'));
paths.add(new FacetLabel(categoryPath, '/'));
}
facetFields.addFields(doc, paths);
indexWriter.addDocument(doc);
@ -90,8 +90,8 @@ public class SimpleFacetsExample {
// Count both "Publish Date" and "Author" dimensions
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
// Aggregates the facet counts
FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
@ -118,12 +118,12 @@ public class SimpleFacetsExample {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
// Now user drills down on Publish Date/2010:
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("Author"), 10));
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new FacetLabel("Author"), 10));
// Passing no baseQuery means we drill down on all
// documents ("browse only"):
DrillDownQuery q = new DrillDownQuery(fsp.indexingParams);
q.add(new CategoryPath("Publish Date/2010", '/'));
q.add(new FacetLabel("Publish Date/2010", '/'));
FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
searcher.search(q, fc);

View File

@ -31,7 +31,7 @@ import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesAccumulator;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetFields;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@ -54,9 +54,9 @@ public class SimpleSortedSetFacetsExample {
private void add(IndexWriter indexWriter, SortedSetDocValuesFacetFields facetFields, String ... categoryPaths) throws IOException {
Document doc = new Document();
List<CategoryPath> paths = new ArrayList<CategoryPath>();
List<FacetLabel> paths = new ArrayList<FacetLabel>();
for (String categoryPath : categoryPaths) {
paths.add(new CategoryPath(categoryPath, '/'));
paths.add(new FacetLabel(categoryPath, '/'));
}
facetFields.addFields(doc, paths);
indexWriter.addDocument(doc);
@ -87,8 +87,8 @@ public class SimpleSortedSetFacetsExample {
// Count both "Publish Year" and "Author" dimensions
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Year"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Year"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
// Aggregates the facet counts
FacetsCollector fc = FacetsCollector.create(new SortedSetDocValuesAccumulator(state, fsp));
@ -114,9 +114,9 @@ public class SimpleSortedSetFacetsExample {
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(indexReader);
// Now user drills down on Publish Year/2010:
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("Author"), 10));
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new FacetLabel("Author"), 10));
DrillDownQuery q = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
q.add(new CategoryPath("Publish Year/2010", '/'));
q.add(new FacetLabel("Publish Year/2010", '/'));
FacetsCollector fc = FacetsCollector.create(new SortedSetDocValuesAccumulator(state, fsp));
searcher.search(q, fc);

View File

@ -5,7 +5,7 @@ import java.util.List;
import org.apache.lucene.facet.collections.ObjectToIntMap;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@ -28,18 +28,18 @@ import org.junit.Test;
public class TestMultiCategoryListsFacetsExample extends LuceneTestCase {
private static final ObjectToIntMap<CategoryPath> expectedCounts = new ObjectToIntMap<CategoryPath>();
private static final ObjectToIntMap<FacetLabel> expectedCounts = new ObjectToIntMap<FacetLabel>();
static {
expectedCounts.put(new CategoryPath("Publish Date", "2012"), 2);
expectedCounts.put(new CategoryPath("Publish Date", "2010"), 2);
expectedCounts.put(new CategoryPath("Publish Date", "1999"), 1);
expectedCounts.put(new CategoryPath("Author", "Lisa"), 2);
expectedCounts.put(new CategoryPath("Author", "Frank"), 1);
expectedCounts.put(new CategoryPath("Author", "Susan"), 1);
expectedCounts.put(new CategoryPath("Author", "Bob"), 1);
expectedCounts.put(new FacetLabel("Publish Date", "2012"), 2);
expectedCounts.put(new FacetLabel("Publish Date", "2010"), 2);
expectedCounts.put(new FacetLabel("Publish Date", "1999"), 1);
expectedCounts.put(new FacetLabel("Author", "Lisa"), 2);
expectedCounts.put(new FacetLabel("Author", "Frank"), 1);
expectedCounts.put(new FacetLabel("Author", "Susan"), 1);
expectedCounts.put(new FacetLabel("Author", "Bob"), 1);
}
private void assertExpectedCounts(List<FacetResult> facetResults, ObjectToIntMap<CategoryPath> expCounts) {
private void assertExpectedCounts(List<FacetResult> facetResults, ObjectToIntMap<FacetLabel> expCounts) {
for (FacetResult res : facetResults) {
FacetResultNode root = res.getFacetResultNode();
for (FacetResultNode node : root.subResults) {

View File

@ -24,7 +24,7 @@ import org.apache.lucene.facet.range.LongRange;
import org.apache.lucene.facet.range.RangeFacetRequest;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
@ -33,14 +33,14 @@ import org.junit.Test;
@SuppressCodecs("Lucene3x")
public class TestRangeFacetsExample extends LuceneTestCase {
private static final ObjectToIntMap<CategoryPath> expectedCounts = new ObjectToIntMap<CategoryPath>();
private static final ObjectToIntMap<FacetLabel> expectedCounts = new ObjectToIntMap<FacetLabel>();
static {
expectedCounts.put(new CategoryPath("timestamp", "Past hour"), 4);
expectedCounts.put(new CategoryPath("timestamp", "Past six hours"), 22);
expectedCounts.put(new CategoryPath("timestamp", "Past day"), 87);
expectedCounts.put(new FacetLabel("timestamp", "Past hour"), 4);
expectedCounts.put(new FacetLabel("timestamp", "Past six hours"), 22);
expectedCounts.put(new FacetLabel("timestamp", "Past day"), 87);
}
private void assertExpectedCounts(FacetResult res, ObjectToIntMap<CategoryPath> expCounts) {
private void assertExpectedCounts(FacetResult res, ObjectToIntMap<FacetLabel> expCounts) {
FacetResultNode root = res.getFacetResultNode();
for (FacetResultNode node : root.subResults) {
assertEquals("incorrect count for " + node.label, expCounts.get(node.label), (int) node.value);

View File

@ -5,7 +5,7 @@ import java.util.List;
import org.apache.lucene.facet.collections.ObjectToIntMap;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@ -28,24 +28,24 @@ import org.junit.Test;
public class TestSimpleFacetsExample extends LuceneTestCase {
private static final ObjectToIntMap<CategoryPath> expectedCounts = new ObjectToIntMap<CategoryPath>();
private static final ObjectToIntMap<FacetLabel> expectedCounts = new ObjectToIntMap<FacetLabel>();
static {
expectedCounts.put(new CategoryPath("Publish Date", "2012"), 2);
expectedCounts.put(new CategoryPath("Publish Date", "2010"), 2);
expectedCounts.put(new CategoryPath("Publish Date", "1999"), 1);
expectedCounts.put(new CategoryPath("Author", "Lisa"), 2);
expectedCounts.put(new CategoryPath("Author", "Frank"), 1);
expectedCounts.put(new CategoryPath("Author", "Susan"), 1);
expectedCounts.put(new CategoryPath("Author", "Bob"), 1);
expectedCounts.put(new FacetLabel("Publish Date", "2012"), 2);
expectedCounts.put(new FacetLabel("Publish Date", "2010"), 2);
expectedCounts.put(new FacetLabel("Publish Date", "1999"), 1);
expectedCounts.put(new FacetLabel("Author", "Lisa"), 2);
expectedCounts.put(new FacetLabel("Author", "Frank"), 1);
expectedCounts.put(new FacetLabel("Author", "Susan"), 1);
expectedCounts.put(new FacetLabel("Author", "Bob"), 1);
}
private static final ObjectToIntMap<CategoryPath> expectedCountsDrillDown = new ObjectToIntMap<CategoryPath>();
private static final ObjectToIntMap<FacetLabel> expectedCountsDrillDown = new ObjectToIntMap<FacetLabel>();
static {
expectedCountsDrillDown.put(new CategoryPath("Author", "Lisa"), 1);
expectedCountsDrillDown.put(new CategoryPath("Author", "Bob"), 1);
expectedCountsDrillDown.put(new FacetLabel("Author", "Lisa"), 1);
expectedCountsDrillDown.put(new FacetLabel("Author", "Bob"), 1);
}
private void assertExpectedCounts(List<FacetResult> facetResults, ObjectToIntMap<CategoryPath> expCounts) {
private void assertExpectedCounts(List<FacetResult> facetResults, ObjectToIntMap<FacetLabel> expCounts) {
for (FacetResult res : facetResults) {
FacetResultNode root = res.getFacetResultNode();
for (FacetResultNode node : root.subResults) {

View File

@ -5,7 +5,7 @@ import java.util.List;
import org.apache.lucene.facet.collections.ObjectToIntMap;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
@ -31,24 +31,24 @@ import org.junit.Test;
@SuppressCodecs({"Lucene40", "Lucene41"})
public class TestSimpleSortedSetFacetsExample extends LuceneTestCase {
private static final ObjectToIntMap<CategoryPath> expectedCounts = new ObjectToIntMap<CategoryPath>();
private static final ObjectToIntMap<FacetLabel> expectedCounts = new ObjectToIntMap<FacetLabel>();
static {
expectedCounts.put(new CategoryPath("Publish Year", "2012"), 2);
expectedCounts.put(new CategoryPath("Publish Year", "2010"), 2);
expectedCounts.put(new CategoryPath("Publish Year", "1999"), 1);
expectedCounts.put(new CategoryPath("Author", "Lisa"), 2);
expectedCounts.put(new CategoryPath("Author", "Frank"), 1);
expectedCounts.put(new CategoryPath("Author", "Susan"), 1);
expectedCounts.put(new CategoryPath("Author", "Bob"), 1);
expectedCounts.put(new FacetLabel("Publish Year", "2012"), 2);
expectedCounts.put(new FacetLabel("Publish Year", "2010"), 2);
expectedCounts.put(new FacetLabel("Publish Year", "1999"), 1);
expectedCounts.put(new FacetLabel("Author", "Lisa"), 2);
expectedCounts.put(new FacetLabel("Author", "Frank"), 1);
expectedCounts.put(new FacetLabel("Author", "Susan"), 1);
expectedCounts.put(new FacetLabel("Author", "Bob"), 1);
}
private static final ObjectToIntMap<CategoryPath> expectedCountsDrillDown = new ObjectToIntMap<CategoryPath>();
private static final ObjectToIntMap<FacetLabel> expectedCountsDrillDown = new ObjectToIntMap<FacetLabel>();
static {
expectedCountsDrillDown.put(new CategoryPath("Author", "Lisa"), 1);
expectedCountsDrillDown.put(new CategoryPath("Author", "Bob"), 1);
expectedCountsDrillDown.put(new FacetLabel("Author", "Lisa"), 1);
expectedCountsDrillDown.put(new FacetLabel("Author", "Bob"), 1);
}
private void assertExpectedCounts(List<FacetResult> facetResults, ObjectToIntMap<CategoryPath> expCounts) {
private void assertExpectedCounts(List<FacetResult> facetResults, ObjectToIntMap<FacetLabel> expCounts) {
for (FacetResult res : facetResults) {
FacetResultNode root = res.getFacetResultNode();
for (FacetResultNode node : root.subResults) {

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.associations;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.facet.index.DrillDownStream;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
@ -52,7 +52,7 @@ public class AssociationsDrillDownStream extends DrillDownStream {
}
@Override
protected void addAdditionalAttributes(CategoryPath cp, boolean isParent) {
protected void addAdditionalAttributes(FacetLabel cp, boolean isParent) {
if (isParent) {
return; // associations are not added to parent categories
}

View File

@ -11,7 +11,7 @@ import org.apache.lucene.facet.index.DrillDownStream;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.util.BytesRef;
@ -37,7 +37,7 @@ import org.apache.lucene.util.IntsRef;
/**
* A utility class for adding facet fields to a document. Usually one field will
* be added for all facets, however per the
* {@link FacetIndexingParams#getCategoryListParams(CategoryPath)}, one field
* {@link FacetIndexingParams#getCategoryListParams(FacetLabel)}, one field
* may be added for every group of facets.
*
* @lucene.experimental
@ -77,12 +77,12 @@ public class AssociationsFacetFields extends FacetFields {
}
@Override
protected Map<CategoryListParams,Iterable<CategoryPath>> createCategoryListMapping(
Iterable<CategoryPath> categories) {
protected Map<CategoryListParams,Iterable<FacetLabel>> createCategoryListMapping(
Iterable<FacetLabel> categories) {
CategoryAssociationsContainer categoryAssociations = (CategoryAssociationsContainer) categories;
HashMap<CategoryListParams,Iterable<CategoryPath>> categoryLists =
new HashMap<CategoryListParams,Iterable<CategoryPath>>();
for (CategoryPath cp : categories) {
HashMap<CategoryListParams,Iterable<FacetLabel>> categoryLists =
new HashMap<CategoryListParams,Iterable<FacetLabel>>();
for (FacetLabel cp : categories) {
// each category may be indexed under a different field, so add it to the right list.
CategoryListParams clp = indexingParams.getCategoryListParams(cp);
CategoryAssociationsContainer clpContainer = (CategoryAssociationsContainer) categoryLists.get(clp);
@ -97,13 +97,13 @@ public class AssociationsFacetFields extends FacetFields {
@Override
protected Map<String,BytesRef> getCategoryListData(CategoryListParams categoryListParams, IntsRef ordinals,
Iterable<CategoryPath> categories) throws IOException {
Iterable<FacetLabel> categories) throws IOException {
AssociationsListBuilder associations = new AssociationsListBuilder((CategoryAssociationsContainer) categories);
return associations.build(ordinals, categories);
}
@Override
protected DrillDownStream getDrillDownStream(Iterable<CategoryPath> categories) {
protected DrillDownStream getDrillDownStream(Iterable<FacetLabel> categories) {
return new AssociationsDrillDownStream((CategoryAssociationsContainer) categories, indexingParams);
}
@ -113,7 +113,7 @@ public class AssociationsFacetFields extends FacetFields {
}
@Override
public void addFields(Document doc, Iterable<CategoryPath> categories) throws IOException {
public void addFields(Document doc, Iterable<FacetLabel> categories) throws IOException {
if (!(categories instanceof CategoryAssociationsContainer)) {
throw new IllegalArgumentException("categories must be of type " +
CategoryAssociationsContainer.class.getSimpleName());

View File

@ -6,7 +6,7 @@ import java.util.Map;
import org.apache.lucene.facet.index.CategoryListBuilder;
import org.apache.lucene.facet.index.CountingListBuilder;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
@ -47,10 +47,10 @@ public class AssociationsListBuilder implements CategoryListBuilder {
}
@Override
public Map<String,BytesRef> build(IntsRef ordinals, Iterable<CategoryPath> categories) throws IOException {
public Map<String,BytesRef> build(IntsRef ordinals, Iterable<FacetLabel> categories) throws IOException {
final HashMap<String,BytesRef> res = new HashMap<String,BytesRef>();
int idx = 0;
for (CategoryPath cp : categories) {
for (FacetLabel cp : categories) {
// build per-association key BytesRef
CategoryAssociation association = associations.getAssociation(cp);

View File

@ -1,6 +1,6 @@
package org.apache.lucene.facet.associations;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.DataInput;
@ -24,7 +24,7 @@ import org.apache.lucene.store.DataOutput;
*/
/**
* Allows associating an arbitrary value with a {@link CategoryPath}.
* Allows associating an arbitrary value with a {@link FacetLabel}.
*
* @lucene.experimental
*/

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.associations;
import java.util.HashMap;
import java.util.Iterator;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -22,17 +22,17 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
* limitations under the License.
*/
/** Holds {@link CategoryAssociation} per {@link CategoryPath}. */
public class CategoryAssociationsContainer implements Iterable<CategoryPath> {
/** Holds {@link CategoryAssociation} per {@link FacetLabel}. */
public class CategoryAssociationsContainer implements Iterable<FacetLabel> {
private final HashMap<CategoryPath,CategoryAssociation> categoryAssociations =
new HashMap<CategoryPath,CategoryAssociation>();
private final HashMap<FacetLabel,CategoryAssociation> categoryAssociations =
new HashMap<FacetLabel,CategoryAssociation>();
/**
* Adds the {@link CategoryAssociation} for the given {@link CategoryPath
* Adds the {@link CategoryAssociation} for the given {@link FacetLabel
* category}. Overrides any association that was previously set.
*/
public void setAssociation(CategoryPath category, CategoryAssociation association) {
public void setAssociation(FacetLabel category, CategoryAssociation association) {
if (association == null) {
throw new IllegalArgumentException("cannot set a null association to a category");
}
@ -41,14 +41,14 @@ public class CategoryAssociationsContainer implements Iterable<CategoryPath> {
/**
* Returns the {@link CategoryAssociation} that was set for the
* {@link CategoryPath category}, or {@code null} if none was defined.
* {@link FacetLabel category}, or {@code null} if none was defined.
*/
public CategoryAssociation getAssociation(CategoryPath category) {
public CategoryAssociation getAssociation(FacetLabel category) {
return categoryAssociations.get(category);
}
@Override
public Iterator<CategoryPath> iterator() {
public Iterator<FacetLabel> iterator() {
return categoryAssociations.keySet().iterator();
}

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.associations;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetsAggregator;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -34,7 +34,7 @@ public class SumFloatAssociationFacetRequest extends FacetRequest {
* Create a float association facet request for a given node in the
* taxonomy.
*/
public SumFloatAssociationFacetRequest(CategoryPath path, int num) {
public SumFloatAssociationFacetRequest(FacetLabel path, int num) {
super(path, num);
}

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.associations;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetsAggregator;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -34,7 +34,7 @@ public class SumIntAssociationFacetRequest extends FacetRequest {
* Create an integer association facet request for a given node in the
* taxonomy.
*/
public SumIntAssociationFacetRequest(CategoryPath path, int num) {
public SumIntAssociationFacetRequest(FacetLabel path, int num) {
super(path, num);
}

View File

@ -22,7 +22,7 @@ import org.apache.lucene.facet.search.CategoryListIterator;
import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.search.FacetArrays;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.util.PartitionsUtils;
import org.apache.lucene.index.IndexReader;
@ -151,7 +151,7 @@ public class TotalFacetCounts {
}
// needed because FacetSearchParams do not allow empty FacetRequests
private static final FacetRequest DUMMY_REQ = new CountFacetRequest(CategoryPath.EMPTY, 1);
private static final FacetRequest DUMMY_REQ = new CountFacetRequest(FacetLabel.EMPTY, 1);
static TotalFacetCounts compute(final IndexReader indexReader, final TaxonomyReader taxonomy,
final FacetIndexingParams facetIndexingParams) throws IOException {

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.index;
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IntsRef;
@ -33,6 +33,6 @@ import org.apache.lucene.util.IntsRef;
public interface CategoryListBuilder {
/** Returns the encoded ordinals data. */
public Map<String,BytesRef> build(IntsRef ordinals, Iterable<CategoryPath> categories) throws IOException;
public Map<String,BytesRef> build(IntsRef ordinals, Iterable<FacetLabel> categories) throws IOException;
}

View File

@ -11,7 +11,7 @@ import org.apache.lucene.facet.encoding.IntEncoder;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.util.PartitionsUtils;
import org.apache.lucene.util.BytesRef;
@ -141,13 +141,13 @@ public class CountingListBuilder implements CategoryListBuilder {
* processing the array for other purposes.
*/
@Override
public Map<String,BytesRef> build(IntsRef ordinals, Iterable<CategoryPath> categories) throws IOException {
public Map<String,BytesRef> build(IntsRef ordinals, Iterable<FacetLabel> categories) throws IOException {
int upto = ordinals.length; // since we may add ordinals to IntsRef, iterate upto original length
Iterator<CategoryPath> iter = categories.iterator();
Iterator<FacetLabel> iter = categories.iterator();
for (int i = 0; i < upto; i++) {
int ordinal = ordinals.ints[i];
CategoryPath cp = iter.next();
FacetLabel cp = iter.next();
OrdinalPolicy op = clp.getOrdinalPolicy(cp.components[0]);
if (op != OrdinalPolicy.NO_PARENTS) {
// need to add parents too

View File

@ -6,7 +6,7 @@ import java.util.Iterator;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -33,19 +33,19 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
public class DrillDownStream extends TokenStream {
private final FacetIndexingParams indexingParams;
private final Iterator<CategoryPath> categories;
private final Iterator<FacetLabel> categories;
private final CharTermAttribute termAttribute;
private CategoryPath current;
private FacetLabel current;
private boolean isParent;
public DrillDownStream(Iterable<CategoryPath> categories, FacetIndexingParams indexingParams) {
public DrillDownStream(Iterable<FacetLabel> categories, FacetIndexingParams indexingParams) {
termAttribute = addAttribute(CharTermAttribute.class);
this.categories = categories.iterator();
this.indexingParams = indexingParams;
}
protected void addAdditionalAttributes(CategoryPath category, boolean isParent) {
protected void addAdditionalAttributes(FacetLabel category, boolean isParent) {
// a hook for AssociationsDrillDownStream to add the associations payload to
// the drill-down terms
}

View File

@ -15,7 +15,7 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.util.BytesRef;
@ -41,7 +41,7 @@ import org.apache.lucene.util.IntsRef;
/**
* A utility class for adding facet fields to a document. Usually one field will
* be added for all facets, however per the
* {@link FacetIndexingParams#getCategoryListParams(CategoryPath)}, one field
* {@link FacetIndexingParams#getCategoryListParams(FacetLabel)}, one field
* may be added for every group of facets.
*
* @lucene.experimental
@ -88,21 +88,21 @@ public class FacetFields {
/**
* Creates a mapping between a {@link CategoryListParams} and all
* {@link CategoryPath categories} that are associated with it.
* {@link FacetLabel categories} that are associated with it.
*/
protected Map<CategoryListParams,Iterable<CategoryPath>> createCategoryListMapping(
Iterable<CategoryPath> categories) {
protected Map<CategoryListParams,Iterable<FacetLabel>> createCategoryListMapping(
Iterable<FacetLabel> categories) {
if (indexingParams.getAllCategoryListParams().size() == 1) {
return Collections.singletonMap(indexingParams.getCategoryListParams(null), categories);
}
HashMap<CategoryListParams,Iterable<CategoryPath>> categoryLists =
new HashMap<CategoryListParams,Iterable<CategoryPath>>();
for (CategoryPath cp : categories) {
HashMap<CategoryListParams,Iterable<FacetLabel>> categoryLists =
new HashMap<CategoryListParams,Iterable<FacetLabel>>();
for (FacetLabel cp : categories) {
// each category may be indexed under a different field, so add it to the right list.
CategoryListParams clp = indexingParams.getCategoryListParams(cp);
List<CategoryPath> list = (List<CategoryPath>) categoryLists.get(clp);
List<FacetLabel> list = (List<FacetLabel>) categoryLists.get(clp);
if (list == null) {
list = new ArrayList<CategoryPath>();
list = new ArrayList<FacetLabel>();
categoryLists.put(clp, list);
}
list.add(cp);
@ -113,10 +113,10 @@ public class FacetFields {
/**
* Returns the category list data, as a mapping from key to {@link BytesRef}
* which includes the encoded data. Every ordinal in {@code ordinals}
* corrspond to a {@link CategoryPath} returned from {@code categories}.
* corrspond to a {@link FacetLabel} returned from {@code categories}.
*/
protected Map<String,BytesRef> getCategoryListData(CategoryListParams categoryListParams,
IntsRef ordinals, Iterable<CategoryPath> categories /* needed for AssociationsFacetFields */)
IntsRef ordinals, Iterable<FacetLabel> categories /* needed for AssociationsFacetFields */)
throws IOException {
return new CountingListBuilder(categoryListParams, indexingParams, taxonomyWriter).build(ordinals, categories);
}
@ -125,7 +125,7 @@ public class FacetFields {
* Returns a {@link DrillDownStream} for writing the categories drill-down
* terms.
*/
protected DrillDownStream getDrillDownStream(Iterable<CategoryPath> categories) {
protected DrillDownStream getDrillDownStream(Iterable<FacetLabel> categories) {
return new DrillDownStream(categories, indexingParams);
}
@ -148,7 +148,7 @@ public class FacetFields {
}
/** Adds the needed facet fields to the document. */
public void addFields(Document doc, Iterable<CategoryPath> categories) throws IOException {
public void addFields(Document doc, Iterable<FacetLabel> categories) throws IOException {
if (categories == null) {
throw new IllegalArgumentException("categories should not be null");
}
@ -159,19 +159,19 @@ public class FacetFields {
// - DrillDownStream
// - CountingListStream
final Map<CategoryListParams,Iterable<CategoryPath>> categoryLists = createCategoryListMapping(categories);
final Map<CategoryListParams,Iterable<FacetLabel>> categoryLists = createCategoryListMapping(categories);
// for each CLP we add a different field for drill-down terms as well as for
// counting list data.
IntsRef ordinals = new IntsRef(32); // should be enough for most common applications
for (Entry<CategoryListParams, Iterable<CategoryPath>> e : categoryLists.entrySet()) {
for (Entry<CategoryListParams, Iterable<FacetLabel>> e : categoryLists.entrySet()) {
final CategoryListParams clp = e.getKey();
final String field = clp.field;
// build category list data
ordinals.length = 0; // reset
int maxNumOrds = 0;
for (CategoryPath cp : e.getValue()) {
for (FacetLabel cp : e.getValue()) {
int ordinal = taxonomyWriter.addCategory(cp);
maxNumOrds += cp.length; // ordinal and potentially all parents
if (ordinals.ints.length < maxNumOrds) {

View File

@ -9,7 +9,7 @@ import org.apache.lucene.facet.encoding.SortingIntEncoder;
import org.apache.lucene.facet.encoding.UniqueValuesIntEncoder;
import org.apache.lucene.facet.search.CategoryListIterator;
import org.apache.lucene.facet.search.DocValuesCategoryListIterator;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.util.PartitionsUtils;
/*
@ -77,7 +77,7 @@ public class CategoryListParams {
/**
* Encodes the ordinals of all path components except the dimension. The
* dimension of a category is defined to be the first components in
* {@link CategoryPath#components}. For the category A/B/C, the ordinal of
* {@link FacetLabel#components}. For the category A/B/C, the ordinal of
* A/B will be encoded as well, however not the ordinal of A.
*
* <p>

View File

@ -4,7 +4,7 @@ import java.util.Collections;
import java.util.List;
import org.apache.lucene.facet.search.FacetArrays;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -52,7 +52,7 @@ public class FacetIndexingParams {
public static final FacetIndexingParams DEFAULT = new FacetIndexingParams();
/**
* The default delimiter with which {@link CategoryPath#components} are
* The default delimiter with which {@link FacetLabel#components} are
* concatenated when written to the index, e.g. as drill-down terms. If you
* choose to override it by overiding {@link #getFacetDelimChar()}, you should
* make sure that you return a character that's not found in any path
@ -79,13 +79,13 @@ public class FacetIndexingParams {
}
/**
* Returns the {@link CategoryListParams} for this {@link CategoryPath}. The
* Returns the {@link CategoryListParams} for this {@link FacetLabel}. The
* default implementation returns the same {@link CategoryListParams} for all
* categories (even if {@code category} is {@code null}).
*
* @see PerDimensionIndexingParams
*/
public CategoryListParams getCategoryListParams(CategoryPath category) {
public CategoryListParams getCategoryListParams(FacetLabel category) {
return clParams;
}
@ -95,9 +95,9 @@ public class FacetIndexingParams {
* that were written.
* <p>
* <b>NOTE:</b> You should make sure that the {@code char[]} is large enough,
* by e.g. calling {@link CategoryPath#fullPathLength()}.
* by e.g. calling {@link FacetLabel#fullPathLength()}.
*/
public int drillDownTermText(CategoryPath path, char[] buffer) {
public int drillDownTermText(FacetLabel path, char[] buffer) {
return path.copyFullPath(buffer, 0, getFacetDelimChar());
}

View File

@ -6,7 +6,7 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -27,11 +27,11 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
/**
* A {@link FacetIndexingParams} that utilizes different category lists, defined
* by the dimension specified by a {@link CategoryPath category} (see
* by the dimension specified by a {@link FacetLabel category} (see
* {@link #PerDimensionIndexingParams(Map, CategoryListParams)}.
* <p>
* A 'dimension' is defined as the first or "zero-th" component in a
* {@link CategoryPath}. For example, if a category is defined as
* {@link FacetLabel}. For example, if a category is defined as
* "Author/American/Mark Twain", then the dimension would be "Author".
*
* @lucene.experimental
@ -43,7 +43,7 @@ public class PerDimensionIndexingParams extends FacetIndexingParams {
/**
* Initializes a new instance with the given dimension-to-params mapping. The
* dimension is considered as what's returned by
* {@link CategoryPath#components cp.components[0]}.
* {@link FacetLabel#components cp.components[0]}.
*
* <p>
* <b>NOTE:</b> for any dimension whose {@link CategoryListParams} is not
@ -51,7 +51,7 @@ public class PerDimensionIndexingParams extends FacetIndexingParams {
*
* @see #PerDimensionIndexingParams(Map, CategoryListParams)
*/
public PerDimensionIndexingParams(Map<CategoryPath, CategoryListParams> paramsMap) {
public PerDimensionIndexingParams(Map<FacetLabel, CategoryListParams> paramsMap) {
this(paramsMap, DEFAULT_CATEGORY_LIST_PARAMS);
}
@ -60,11 +60,11 @@ public class PerDimensionIndexingParams extends FacetIndexingParams {
* {@link CategoryListParams} will be used for any dimension that is not
* specified in the given mapping.
*/
public PerDimensionIndexingParams(Map<CategoryPath, CategoryListParams> paramsMap,
public PerDimensionIndexingParams(Map<FacetLabel, CategoryListParams> paramsMap,
CategoryListParams categoryListParams) {
super(categoryListParams);
clParamsMap = new HashMap<String,CategoryListParams>();
for (Entry<CategoryPath, CategoryListParams> e : paramsMap.entrySet()) {
for (Entry<FacetLabel, CategoryListParams> e : paramsMap.entrySet()) {
clParamsMap.put(e.getKey().components[0], e.getValue());
}
}
@ -83,7 +83,7 @@ public class PerDimensionIndexingParams extends FacetIndexingParams {
* returns the default {@link CategoryListParams}.
*/
@Override
public CategoryListParams getCategoryListParams(CategoryPath category) {
public CategoryListParams getCategoryListParams(FacetLabel category) {
if (category != null) {
CategoryListParams clParams = clParamsMap.get(category.components[0]);
if (clParams != null) {

View File

@ -2,7 +2,7 @@ package org.apache.lucene.facet.params;
import java.util.Map;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -25,7 +25,7 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
* A {@link CategoryListParams} which allow controlling the
* {@link CategoryListParams.OrdinalPolicy} used for each dimension. The
* dimension is specified as the first component in
* {@link CategoryPath#components}.
* {@link FacetLabel#components}.
*/
public class PerDimensionOrdinalPolicy extends CategoryListParams {

View File

@ -25,7 +25,7 @@ import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.search.FacetsAggregator;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.queries.function.FunctionValues;
@ -73,7 +73,7 @@ public class RangeFacetRequest<T extends Range> extends FacetRequest {
*/
@SuppressWarnings("unchecked")
public RangeFacetRequest(String label, ValueSource valueSource, T...ranges) {
super(new CategoryPath(label), 1);
super(new FacetLabel(label), 1);
this.ranges = ranges;
this.valueSource = valueSource;
this.label = label;

View File

@ -18,7 +18,7 @@ package org.apache.lucene.facet.range;
*/
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/** Holds the facet results for a {@link
* RangeFacetRequest}. */
@ -28,6 +28,6 @@ public class RangeFacetResultNode extends FacetResultNode {
RangeFacetResultNode(String field, Range range, int count) {
super(-1, count);
this.range = range;
this.label = new CategoryPath(field, range.label);
this.label = new FacetLabel(field, range.label);
}
}

View File

@ -7,7 +7,7 @@ import org.apache.lucene.facet.old.ScoredDocIDsIterator;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.DrillDownQuery;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.IndexReader;
@ -88,7 +88,7 @@ public class TakmiSampleFixer extends SampleFixer {
if (fresNode.label == null) {
fresNode.label = taxonomyReader.getPath(fresNode.ordinal);
}
CategoryPath catPath = fresNode.label;
FacetLabel catPath = fresNode.label;
Term drillDownTerm = DrillDownQuery.term(searchParams.indexingParams, catPath);
// TODO (Facet): avoid Multi*?

View File

@ -1,7 +1,7 @@
package org.apache.lucene.facet.search;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -27,7 +27,7 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
*/
public class CountFacetRequest extends FacetRequest {
public CountFacetRequest(CategoryPath path, int num) {
public CountFacetRequest(FacetLabel path, int num) {
super(path, num);
}

View File

@ -24,7 +24,7 @@ import java.util.Map;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
@ -38,8 +38,8 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
/**
* A {@link Query} for drill-down over {@link CategoryPath categories}. You
* should call {@link #add(CategoryPath...)} for every group of categories you
* A {@link Query} for drill-down over {@link FacetLabel categories}. You
* should call {@link #add(FacetLabel...)} for every group of categories you
* want to drill-down over. Each category in the group is {@code OR'ed} with
* the others, and groups are {@code AND'ed}.
* <p>
@ -53,7 +53,7 @@ import org.apache.lucene.search.TermQuery;
public final class DrillDownQuery extends Query {
/** Return a drill-down {@link Term} for a category. */
public static Term term(FacetIndexingParams iParams, CategoryPath path) {
public static Term term(FacetIndexingParams iParams, FacetLabel path) {
CategoryListParams clp = iParams.getCategoryListParams(path);
char[] buffer = new char[path.fullPathLength()];
iParams.drillDownTermText(path, buffer);
@ -128,7 +128,7 @@ public final class DrillDownQuery extends Query {
* Adds one dimension of drill downs; if you pass multiple values they are
* OR'd, and then the entire dimension is AND'd against the base query.
*/
public void add(CategoryPath... paths) {
public void add(FacetLabel... paths) {
Query q;
if (paths[0].length == 0) {
throw new IllegalArgumentException("all CategoryPaths must have length > 0");
@ -141,7 +141,7 @@ public final class DrillDownQuery extends Query {
q = new TermQuery(term(fip, paths[0]));
} else {
BooleanQuery bq = new BooleanQuery(true); // disable coord
for (CategoryPath cp : paths) {
for (FacetLabel cp : paths) {
if (cp.length == 0) {
throw new IllegalArgumentException("all CategoryPaths must have length > 0");
}

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.search;
import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.range.RangeFacetRequest;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -64,7 +64,7 @@ public abstract class FacetRequest {
public enum SortOrder { ASCENDING, DESCENDING }
/** The category being aggregated in this facet request. */
public final CategoryPath categoryPath;
public final FacetLabel categoryPath;
/** The number of child categories to return for {@link #categoryPath}. */
public final int numResults;
@ -88,7 +88,7 @@ public abstract class FacetRequest {
* {@code Integer.MAX_VALUE}, all immediate child categories will be
* returned. Must be greater than 0.
*/
public FacetRequest(CategoryPath path, int numResults) {
public FacetRequest(FacetLabel path, int numResults) {
if (numResults <= 0) {
throw new IllegalArgumentException("num results must be a positive (>0) number: " + numResults);
}

View File

@ -8,7 +8,7 @@ import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.util.CollectionUtil;
@ -36,7 +36,7 @@ import org.apache.lucene.util.CollectionUtil;
*/
public class FacetResult {
private static FacetResultNode addIfNotExist(Map<CategoryPath, FacetResultNode> nodes, FacetResultNode node) {
private static FacetResultNode addIfNotExist(Map<FacetLabel, FacetResultNode> nodes, FacetResultNode node) {
FacetResultNode n = nodes.get(node.label);
if (n == null) {
nodes.put(node.label, node);
@ -51,7 +51,7 @@ public class FacetResult {
* the hierarchy. The results are merged according to the following rules:
* <ul>
* <li>If two results share the same dimension (first component in their
* {@link CategoryPath}), they are merged.
* {@link FacetLabel}), they are merged.
* <li>If a result is missing ancestors in the other results, e.g. A/B/C but
* no corresponding A or A/B, these nodes are 'filled' with their label,
* ordinal and value (obtained from the respective {@link FacetArrays}).
@ -94,7 +94,7 @@ public class FacetResult {
return fr1.getFacetRequest().categoryPath.compareTo(fr2.getFacetRequest().categoryPath);
}
});
Map<CategoryPath, FacetResultNode> mergedNodes = new HashMap<CategoryPath,FacetResultNode>();
Map<FacetLabel, FacetResultNode> mergedNodes = new HashMap<FacetLabel,FacetResultNode>();
FacetArrays arrays = dimArrays != null ? dimArrays.get(frs.get(0).getFacetRequest().categoryPath.components[0]) : null;
for (FacetResult fr : frs) {
FacetRequest freq = fr.getFacetRequest();
@ -105,7 +105,7 @@ public class FacetResult {
FacetResultNode frn = fr.getFacetResultNode();
FacetResultNode merged = mergedNodes.get(frn.label);
if (merged == null) {
CategoryPath parent = frn.label.subpath(frn.label.length - 1);
FacetLabel parent = frn.label.subpath(frn.label.length - 1);
FacetResultNode childNode = frn;
FacetResultNode parentNode = null;
while (parent.length > 0 && (parentNode = mergedNodes.get(parent)) == null) {
@ -154,8 +154,8 @@ public class FacetResult {
}
// find the 'first' node to put on the FacetResult root
CategoryPath min = null;
for (CategoryPath cp : mergedNodes.keySet()) {
FacetLabel min = null;
for (FacetLabel cp : mergedNodes.keySet()) {
if (min == null || cp.compareTo(min) < 0) {
min = cp;
}

View File

@ -4,7 +4,7 @@ import java.util.Collections;
import java.util.List;
import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
/*
@ -42,7 +42,7 @@ public class FacetResultNode implements Comparable<FacetResultNode> {
public int ordinal;
/**
* The {@link CategoryPath label} of this result. May be {@code null} if not
* The {@link FacetLabel label} of this result. May be {@code null} if not
* computed, in which case use {@link TaxonomyReader#getPath(int)} to label
* it.
* <p>
@ -50,7 +50,7 @@ public class FacetResultNode implements Comparable<FacetResultNode> {
* {@link FacetRequest#getNumLabel()} &lt;
* {@link FacetRequest#numResults} there will be unlabeled nodes.
*/
public CategoryPath label;
public FacetLabel label;
/**
* The value of this result. Its actual type depends on the

View File

@ -5,7 +5,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
@ -203,7 +203,7 @@ public abstract class FacetsCollector extends Collector {
/**
* Returns a {@link FacetResult} per {@link FacetRequest} set in
* {@link FacetSearchParams}. Note that if a {@link FacetRequest} defines a
* {@link CategoryPath} which does not exist in the taxonomy, an empty
* {@link FacetLabel} which does not exist in the taxonomy, an empty
* {@link FacetResult} will be returned for it.
*/
public final List<FacetResult> getFacetResults() throws IOException {

View File

@ -11,7 +11,7 @@ import org.apache.lucene.facet.search.FacetArrays;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetsAggregator;
import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -41,7 +41,7 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
*/
public class MultiFacetsAggregator implements FacetsAggregator {
private final Map<CategoryPath,FacetsAggregator> categoryAggregators;
private final Map<FacetLabel,FacetsAggregator> categoryAggregators;
private final List<FacetsAggregator> aggregators;
/**
@ -49,11 +49,11 @@ public class MultiFacetsAggregator implements FacetsAggregator {
* <p>
* The mapping is used to rollup the values of the specific category by the
* corresponding {@link FacetsAggregator}. It is ok to pass differnet
* {@link FacetsAggregator} instances for each {@link CategoryPath} - the
* {@link FacetsAggregator} instances for each {@link FacetLabel} - the
* constructor ensures that each aggregator <u>type</u> (determined by its
* class) is invoked only once.
*/
public MultiFacetsAggregator(Map<CategoryPath,FacetsAggregator> aggregators) {
public MultiFacetsAggregator(Map<FacetLabel,FacetsAggregator> aggregators) {
this.categoryAggregators = aggregators;
// make sure that each FacetsAggregator class is invoked only once, or

View File

@ -1,7 +1,7 @@
package org.apache.lucene.facet.search;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -29,7 +29,7 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
public class SumScoreFacetRequest extends FacetRequest {
/** Create a score facet request for a given node in the taxonomy. */
public SumScoreFacetRequest(CategoryPath path, int num) {
public SumScoreFacetRequest(FacetLabel path, int num) {
super(path, num);
}

View File

@ -10,7 +10,7 @@ import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
@ -176,7 +176,7 @@ public class SumValueSourceFacetRequest extends FacetRequest {
* documents' values. You can also specify if the value source requires
* document scores or not.
*/
public SumValueSourceFacetRequest(CategoryPath path, int num, ValueSource valueSource, boolean requiresDocScores) {
public SumValueSourceFacetRequest(FacetLabel path, int num, ValueSource valueSource, boolean requiresDocScores) {
super(path, num);
this.valueSource = valueSource;
this.requiresDocScores = requiresDocScores;

View File

@ -14,7 +14,7 @@ import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.search.FacetRequest.SortOrder;
import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.IndexReader;
@ -124,7 +124,7 @@ public class TaxonomyFacetsAccumulator extends FacetsAccumulator {
CategoryListParams clp = e.getKey();
List<FacetRequest> requests = e.getValue();
Map<Class<? extends FacetsAggregator>,FacetsAggregator> aggClasses = new HashMap<Class<? extends FacetsAggregator>,FacetsAggregator>();
Map<CategoryPath,FacetsAggregator> perCategoryAggregator = new HashMap<CategoryPath,FacetsAggregator>();
Map<FacetLabel,FacetsAggregator> perCategoryAggregator = new HashMap<FacetLabel,FacetsAggregator>();
for (FacetRequest fr : requests) {
FacetsAggregator fa = fr.createFacetsAggregator(searchParams.indexingParams);
if (fa == null) {

View File

@ -94,7 +94,7 @@ public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
* too, excluded from the FacetResult tree.
* @throws IOException
* in case
* {@link TaxonomyReader#getOrdinal(org.apache.lucene.facet.taxonomy.CategoryPath)}
* {@link TaxonomyReader#getOrdinal(org.apache.lucene.facet.taxonomy.FacetLabel)}
* does.
* @see #fetchPartitionResult(int)
*/

View File

@ -29,7 +29,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.IndexDocument;
import org.apache.lucene.index.IndexWriter;
@ -155,7 +155,7 @@ public class FacetIndexWriter extends IndexWriter {
throw new IllegalArgumentException("dimension \"" + facetField.dim + "\" is not hierarchical yet has " + facetField.path.length + " components");
}
CategoryPath cp = CategoryPath.create(facetField.dim, facetField.path);
FacetLabel cp = FacetLabel.create(facetField.dim, facetField.path);
int ordinal = taxoWriter.addCategory(cp);
ordinals.ints[ordinals.length++] = ordinal;
@ -193,7 +193,7 @@ public class FacetIndexWriter extends IndexWriter {
//System.out.println(" field=" + indexedFieldName);
for(SortedSetDocValuesFacetField facetField : ent.getValue()) {
CategoryPath cp = new CategoryPath(facetField.dim, facetField.label);
FacetLabel cp = new FacetLabel(facetField.dim, facetField.label);
String fullPath = cp.toString(facetDelimChar);
//System.out.println("add " + fullPath);

View File

@ -18,21 +18,17 @@ package org.apache.lucene.facet.simple;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.facet.range.Range;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
/**
* Uses {@link RangeFacetRequest#getValues(AtomicReaderContext)} and accumulates
* counts for provided ranges.
* accumulates counts for provided ranges.
*/
public class RangeFacetCounts extends Facets {
private final Range[] ranges;

View File

@ -23,7 +23,7 @@ import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
@ -37,8 +37,8 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
/**
* A {@link Query} for drill-down over {@link CategoryPath categories}. You
* should call {@link #add(CategoryPath...)} for every group of categories you
* A {@link Query} for drill-down over {@link FacetLabel categories}. You
* should call {@link #add(FacetLabel...)} for every group of categories you
* want to drill-down over. Each category in the group is {@code OR'ed} with
* the others, and groups are {@code AND'ed}.
* <p>
@ -51,7 +51,7 @@ import org.apache.lucene.search.TermQuery;
*/
public final class SimpleDrillDownQuery extends Query {
private static Term term(String field, char delimChar, CategoryPath path) {
private static Term term(String field, char delimChar, FacetLabel path) {
return new Term(field, path.toString(delimChar));
}
@ -93,7 +93,7 @@ public final class SimpleDrillDownQuery extends Query {
}
/**
* Creates a new {@link DrillDownQuery} without a base query,
* Creates a new {@code SimpleDrillDownQuery} without a base query,
* to perform a pure browsing query (equivalent to using
* {@link MatchAllDocsQuery} as base).
*/
@ -102,7 +102,7 @@ public final class SimpleDrillDownQuery extends Query {
}
/**
* Creates a new {@link DrillDownQuery} over the given base query. Can be
* Creates a new {@code SimpleDrillDownQuery} over the given base query. Can be
* {@code null}, in which case the result {@link Query} from
* {@link #rewrite(IndexReader)} will be a pure browsing query, filtering on
* the added categories only.
@ -119,17 +119,17 @@ public final class SimpleDrillDownQuery extends Query {
* OR'd, and then the entire dimension is AND'd against the base query.
*/
// nocommit can we remove CatPath here?
public void add(CategoryPath... paths) {
public void add(FacetLabel... paths) {
add(FacetsConfig.DEFAULT_INDEXED_FIELD_NAME, Constants.DEFAULT_DELIM_CHAR, paths);
}
// nocommit can we remove CatPath here?
public void add(String field, CategoryPath... paths) {
public void add(String field, FacetLabel... paths) {
add(field, Constants.DEFAULT_DELIM_CHAR, paths);
}
// nocommit can we remove CatPath here?
public void add(String field, char delimChar, CategoryPath... paths) {
public void add(String field, char delimChar, FacetLabel... paths) {
Query q;
if (paths[0].length == 0) {
throw new IllegalArgumentException("all CategoryPaths must have length > 0");
@ -142,7 +142,7 @@ public final class SimpleDrillDownQuery extends Query {
q = new TermQuery(term(field, delimChar, paths[0]));
} else {
BooleanQuery bq = new BooleanQuery(true); // disable coord
for (CategoryPath cp : paths) {
for (FacetLabel cp : paths) {
if (cp.length == 0) {
throw new IllegalArgumentException("all CategoryPaths must have length > 0");
}

View File

@ -18,11 +18,11 @@ package org.apache.lucene.facet.simple;
*/
import java.util.List;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
public final class SimpleFacetResult {
/** Path whose children we counted. */
public final CategoryPath path;
public final FacetLabel path;
/** Total value for this path (sum of all child counts, or
* sum of all child values), even those not included in
@ -32,7 +32,7 @@ public final class SimpleFacetResult {
/** Child counts. */
public final LabelAndValue[] labelValues;
public SimpleFacetResult(CategoryPath path, Number value, LabelAndValue[] labelValues) {
public SimpleFacetResult(FacetLabel path, Number value, LabelAndValue[] labelValues) {
this.path = path;
this.value = value;
this.labelValues = labelValues;

View File

@ -22,7 +22,7 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;

View File

@ -29,7 +29,7 @@ import java.util.Map;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.simple.SortedSetDocValuesReaderState.OrdRange;
import org.apache.lucene.facet.simple.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues.MultiSortedSetDocValues;
@ -40,7 +40,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PriorityQueue;
/** Compute facets counts from previously
* indexed {@link SortedSetDocValuesFacetFields},
* indexed {@link SortedSetDocValuesFacetField},
 * without requiring a separate taxonomy index. Faceting is
* a bit slower (~25%), and there is added cost on every
* {@link IndexReader} open to create a new {@link
@ -130,7 +130,7 @@ public class SortedSetDocValuesFacetCounts extends Facets {
labelValues[i] = new LabelAndValue(s.substring(dim.length()+1, s.length()), ordAndCount.count);
}
return new SimpleFacetResult(new CategoryPath(dim), dimCount, labelValues);
return new SimpleFacetResult(new FacetLabel(dim), dimCount, labelValues);
}
/** Does all the "real work" of tallying up the counts. */

View File

@ -25,7 +25,7 @@ import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.BinaryDocValues;
@ -100,7 +100,7 @@ public class TaxonomyFacetCounts extends Facets {
String dim = ent.getKey();
FacetsConfig.DimConfig ft = ent.getValue();
if (ft.hierarchical && ft.multiValued == false) {
int dimRootOrd = taxoReader.getOrdinal(new CategoryPath(dim));
int dimRootOrd = taxoReader.getOrdinal(new FacetLabel(dim));
// It can be -1 if this field was declared in the
// facetsConfig but never indexed:
if (dimRootOrd > 0) {
@ -125,7 +125,7 @@ public class TaxonomyFacetCounts extends Facets {
* this path doesn't exist, else the count. */
@Override
public Number getSpecificValue(String dim, String... path) throws IOException {
int ord = taxoReader.getOrdinal(CategoryPath.create(dim, path));
int ord = taxoReader.getOrdinal(FacetLabel.create(dim, path));
if (ord < 0) {
return -1;
}
@ -134,7 +134,7 @@ public class TaxonomyFacetCounts extends Facets {
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
CategoryPath cp = CategoryPath.create(dim, path);
FacetLabel cp = FacetLabel.create(dim, path);
int ord = taxoReader.getOrdinal(cp);
if (ord == -1) {
//System.out.println("no ord for path=" + path);
@ -143,7 +143,7 @@ public class TaxonomyFacetCounts extends Facets {
return getTopChildren(cp, ord, topN);
}
private SimpleFacetResult getTopChildren(CategoryPath path, int dimOrd, int topN) throws IOException {
private SimpleFacetResult getTopChildren(FacetLabel path, int dimOrd, int topN) throws IOException {
TopOrdCountQueue q = new TopOrdCountQueue(topN);
@ -185,7 +185,7 @@ public class TaxonomyFacetCounts extends Facets {
LabelAndValue[] labelValues = new LabelAndValue[q.size()];
for(int i=labelValues.length-1;i>=0;i--) {
TopOrdCountQueue.OrdAndCount ordAndCount = q.pop();
CategoryPath child = taxoReader.getPath(ordAndCount.ord);
FacetLabel child = taxoReader.getPath(ordAndCount.ord);
labelValues[i] = new LabelAndValue(child.components[path.length], ordAndCount.count);
}

View File

@ -26,7 +26,7 @@ import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.simple.SimpleFacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.BinaryDocValues;
@ -129,7 +129,7 @@ public class TaxonomyFacetSumValueSource extends Facets {
String dim = ent.getKey();
FacetsConfig.DimConfig ft = ent.getValue();
if (ft.hierarchical && ft.multiValued == false) {
int dimRootOrd = taxoReader.getOrdinal(new CategoryPath(dim));
int dimRootOrd = taxoReader.getOrdinal(new FacetLabel(dim));
assert dimRootOrd > 0;
values[dimRootOrd] += rollup(children[dimRootOrd]);
}
@ -149,7 +149,7 @@ public class TaxonomyFacetSumValueSource extends Facets {
@Override
public Number getSpecificValue(String dim, String... path) throws IOException {
int ord = taxoReader.getOrdinal(CategoryPath.create(dim, path));
int ord = taxoReader.getOrdinal(FacetLabel.create(dim, path));
if (ord < 0) {
return -1;
}
@ -158,7 +158,7 @@ public class TaxonomyFacetSumValueSource extends Facets {
@Override
public SimpleFacetResult getTopChildren(int topN, String dim, String... path) throws IOException {
CategoryPath cp = CategoryPath.create(dim, path);
FacetLabel cp = FacetLabel.create(dim, path);
int ord = taxoReader.getOrdinal(cp);
if (ord == -1) {
return null;
@ -166,7 +166,7 @@ public class TaxonomyFacetSumValueSource extends Facets {
return getTopChildren(cp, ord, topN);
}
private SimpleFacetResult getTopChildren(CategoryPath path, int dimOrd, int topN) throws IOException {
private SimpleFacetResult getTopChildren(FacetLabel path, int dimOrd, int topN) throws IOException {
TopOrdValueQueue q = new TopOrdValueQueue(topN);
@ -207,7 +207,7 @@ public class TaxonomyFacetSumValueSource extends Facets {
LabelAndValue[] labelValues = new LabelAndValue[q.size()];
for(int i=labelValues.length-1;i>=0;i--) {
TopOrdValueQueue.OrdAndValue ordAndValue = q.pop();
CategoryPath child = taxoReader.getPath(ordAndValue.ord);
FacetLabel child = taxoReader.getPath(ordAndValue.ord);
labelValues[i] = new LabelAndValue(child.components[path.length], ordAndValue.value);
}

View File

@ -33,7 +33,7 @@ import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.search.FacetsAccumulator;
import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues;
@ -234,7 +234,7 @@ public class SortedSetDocValuesAccumulator extends FacetsAccumulator {
dimCount += counts[ord];
FacetResultNode node = new FacetResultNode(ord, counts[ord]);
dv.lookupOrd(ord, scratch);
node.label = new CategoryPath(scratch.utf8ToString().split(state.separatorRegex, 2));
node.label = new FacetLabel(scratch.utf8ToString().split(state.separatorRegex, 2));
nodes.add(node);
}
}
@ -258,7 +258,7 @@ public class SortedSetDocValuesAccumulator extends FacetsAccumulator {
}
FacetResultNode rootNode = new FacetResultNode(-1, dimCount);
rootNode.label = new CategoryPath(new String[] {dim});
rootNode.label = new FacetLabel(new String[] {dim});
rootNode.subResults = nodes;
results.add(new FacetResult(request, rootNode, nodes.size()));
continue;
@ -300,13 +300,13 @@ public class SortedSetDocValuesAccumulator extends FacetsAccumulator {
}
FacetResultNode rootNode = new FacetResultNode(-1, dimCount);
rootNode.label = new CategoryPath(new String[] {dim});
rootNode.label = new FacetLabel(new String[] {dim});
FacetResultNode[] childNodes = new FacetResultNode[q.size()];
for(int i=childNodes.length-1;i>=0;i--) {
childNodes[i] = q.pop();
dv.lookupOrd(childNodes[i].ordinal, scratch);
childNodes[i].label = new CategoryPath(scratch.utf8ToString().split(state.separatorRegex, 2));
childNodes[i].label = new FacetLabel(scratch.utf8ToString().split(state.separatorRegex, 2));
}
rootNode.subResults = Arrays.asList(childNodes);

View File

@ -28,7 +28,7 @@ import org.apache.lucene.facet.index.DrillDownStream;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.util.BytesRef;
/** Use this to index facets if you intend to
@ -41,13 +41,13 @@ import org.apache.lucene.util.BytesRef;
public class SortedSetDocValuesFacetFields extends FacetFields {
/** Create a {@code SortedSetDocValuesFacetField} with the
* provided {@link CategoryPath}. */
* provided {@link FacetLabel}. */
public SortedSetDocValuesFacetFields() {
this(FacetIndexingParams.DEFAULT);
}
/** Create a {@code SortedSetDocValuesFacetField} with the
* provided {@link CategoryPath}, and custom {@link
* provided {@link FacetLabel}, and custom {@link
* FacetIndexingParams}. */
public SortedSetDocValuesFacetFields(FacetIndexingParams fip) {
super(null, fip);
@ -57,19 +57,19 @@ public class SortedSetDocValuesFacetFields extends FacetFields {
}
@Override
public void addFields(Document doc, Iterable<CategoryPath> categories) throws IOException {
public void addFields(Document doc, Iterable<FacetLabel> categories) throws IOException {
if (categories == null) {
throw new IllegalArgumentException("categories should not be null");
}
final Map<CategoryListParams,Iterable<CategoryPath>> categoryLists = createCategoryListMapping(categories);
for (Entry<CategoryListParams, Iterable<CategoryPath>> e : categoryLists.entrySet()) {
final Map<CategoryListParams,Iterable<FacetLabel>> categoryLists = createCategoryListMapping(categories);
for (Entry<CategoryListParams, Iterable<FacetLabel>> e : categoryLists.entrySet()) {
CategoryListParams clp = e.getKey();
String dvField = clp.field + SortedSetDocValuesReaderState.FACET_FIELD_EXTENSION;
// Add sorted-set DV fields, one per value:
for(CategoryPath cp : e.getValue()) {
for(FacetLabel cp : e.getValue()) {
if (cp.length != 2) {
throw new IllegalArgumentException("only flat facets (dimension + label) are currently supported; got " + cp);
}

View File

@ -28,8 +28,8 @@ import java.util.regex.Pattern;
*
* @lucene.experimental
*/
// nocommit rename to FacetLabel?
public class CategoryPath implements Comparable<CategoryPath> {
// nocommit rename to just Label under .facet?
public class FacetLabel implements Comparable<FacetLabel> {
/*
* copied from DocumentWriterPerThread -- if a CategoryPath is resolved to a
@ -38,33 +38,33 @@ public class CategoryPath implements Comparable<CategoryPath> {
* be on the safe side.
*/
/**
* The maximum number of characters a {@link CategoryPath} can have. That is
* {@link CategoryPath#toString(char)} length must not exceed that limit.
* The maximum number of characters a {@link FacetLabel} can have. That is
* {@link FacetLabel#toString(char)} length must not exceed that limit.
*/
public final static int MAX_CATEGORY_PATH_LENGTH = (BYTE_BLOCK_SIZE - 2) / 4;
/** An empty {@link CategoryPath} */
public static final CategoryPath EMPTY = new CategoryPath();
/** An empty {@link FacetLabel} */
public static final FacetLabel EMPTY = new FacetLabel();
/**
* The components of this {@link CategoryPath}. Note that this array may be
* shared with other {@link CategoryPath} instances, e.g. as a result of
* The components of this {@link FacetLabel}. Note that this array may be
* shared with other {@link FacetLabel} instances, e.g. as a result of
* {@link #subpath(int)}, therefore you should traverse the array up to
* {@link #length} for this path's components.
*/
public final String[] components;
/** The number of components of this {@link CategoryPath}. */
/** The number of components of this {@link FacetLabel}. */
public final int length;
// Used by singleton EMPTY
private CategoryPath() {
private FacetLabel() {
components = null;
length = 0;
}
// Used by subpath
private CategoryPath(final CategoryPath copyFrom, final int prefixLen) {
private FacetLabel(final FacetLabel copyFrom, final int prefixLen) {
// while the code which calls this method is safe, at some point a test
// tripped on AIOOBE in toString, but we failed to reproduce. adding the
// assert as a safety check.
@ -76,7 +76,7 @@ public class CategoryPath implements Comparable<CategoryPath> {
}
/** Construct from the given path components. */
public CategoryPath(final String... components) {
public FacetLabel(final String... components) {
assert components.length > 0 : "use CategoryPath.EMPTY to create an empty path";
long len = 0;
for (String comp : components) {
@ -96,15 +96,15 @@ public class CategoryPath implements Comparable<CategoryPath> {
}
// nocommit javadocs/rename
public static CategoryPath create(String dim, String... path) {
public static FacetLabel create(String dim, String... path) {
String[] components = new String[1+path.length];
components[0] = dim;
System.arraycopy(path, 0, components, 1, path.length);
return new CategoryPath(components);
return new FacetLabel(components);
}
/** Construct from a given path, separating path components with {@code delimiter}. */
public CategoryPath(final String pathString, final char delimiter) {
public FacetLabel(final String pathString, final char delimiter) {
if (pathString.length() > MAX_CATEGORY_PATH_LENGTH) {
throw new IllegalArgumentException("category path exceeds maximum allowed path length: max="
+ MAX_CATEGORY_PATH_LENGTH + " len=" + pathString.length()
@ -143,11 +143,11 @@ public class CategoryPath implements Comparable<CategoryPath> {
}
/**
* Compares this path with another {@link CategoryPath} for lexicographic
* Compares this path with another {@link FacetLabel} for lexicographic
* order.
*/
@Override
public int compareTo(CategoryPath other) {
public int compareTo(FacetLabel other) {
final int len = length < other.length ? length : other.length;
for (int i = 0, j = 0; i < len; i++, j++) {
int cmp = components[i].compareTo(other.components[j]);
@ -203,11 +203,11 @@ public class CategoryPath implements Comparable<CategoryPath> {
@Override
public boolean equals(Object obj) {
if (!(obj instanceof CategoryPath)) {
if (!(obj instanceof FacetLabel)) {
return false;
}
CategoryPath other = (CategoryPath) obj;
FacetLabel other = (FacetLabel) obj;
if (length != other.length) {
return false; // not same length, cannot be equal
}
@ -249,13 +249,13 @@ public class CategoryPath implements Comparable<CategoryPath> {
}
/** Returns a sub-path of this path up to {@code length} components. */
public CategoryPath subpath(final int length) {
public FacetLabel subpath(final int length) {
if (length >= this.length || length < 0) {
return this;
} else if (length == 0) {
return EMPTY;
} else {
return new CategoryPath(this, length);
return new FacetLabel(this, length);
}
}

View File

@ -92,7 +92,7 @@ public abstract class TaxonomyReader implements Closeable {
/**
* The root category (the category with the empty path) always has the ordinal
* 0, to which we give a name ROOT_ORDINAL. {@link #getOrdinal(CategoryPath)}
* 0, to which we give a name ROOT_ORDINAL. {@link #getOrdinal(FacetLabel)}
* of an empty path will always return {@code ROOT_ORDINAL}, and
* {@link #getPath(int)} with {@code ROOT_ORDINAL} will return the empty path.
*/
@ -215,10 +215,10 @@ public abstract class TaxonomyReader implements Closeable {
* @return the category's ordinal or {@link #INVALID_ORDINAL} if the category
 * wasn't found.
*/
public abstract int getOrdinal(CategoryPath categoryPath) throws IOException;
public abstract int getOrdinal(FacetLabel categoryPath) throws IOException;
/** Returns the path name of the category with the given ordinal. */
public abstract CategoryPath getPath(int ordinal) throws IOException;
public abstract FacetLabel getPath(int ordinal) throws IOException;
/** Returns the current refCount for this taxonomy reader. */
public final int getRefCount() {

View File

@ -64,7 +64,7 @@ public interface TaxonomyWriter extends Closeable, TwoPhaseCommit {
* ordinal of a category is guaranteed to be smaller then the ordinal of
* any of its descendants.
*/
public int addCategory(CategoryPath categoryPath) throws IOException;
public int addCategory(FacetLabel categoryPath) throws IOException;
/**
* getParent() returns the ordinal of the parent category of the category

View File

@ -6,7 +6,7 @@ import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.lucene.facet.collections.LRUHashMap;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.CorruptIndexException; // javadocs
@ -60,8 +60,8 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
private final DirectoryReader indexReader;
// TODO: test DoubleBarrelLRUCache and consider using it instead
private LRUHashMap<CategoryPath, Integer> ordinalCache;
private LRUHashMap<Integer, CategoryPath> categoryCache;
private LRUHashMap<FacetLabel, Integer> ordinalCache;
private LRUHashMap<Integer, FacetLabel> categoryCache;
private volatile TaxonomyIndexArrays taxoArrays;
@ -73,15 +73,15 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
* arrays.
*/
DirectoryTaxonomyReader(DirectoryReader indexReader, DirectoryTaxonomyWriter taxoWriter,
LRUHashMap<CategoryPath,Integer> ordinalCache, LRUHashMap<Integer,CategoryPath> categoryCache,
LRUHashMap<FacetLabel,Integer> ordinalCache, LRUHashMap<Integer,FacetLabel> categoryCache,
TaxonomyIndexArrays taxoArrays) throws IOException {
this.indexReader = indexReader;
this.taxoWriter = taxoWriter;
this.taxoEpoch = taxoWriter == null ? -1 : taxoWriter.getTaxonomyEpoch();
// use the same instance of the cache, note the protective code in getOrdinal and getPath
this.ordinalCache = ordinalCache == null ? new LRUHashMap<CategoryPath,Integer>(DEFAULT_CACHE_VALUE) : ordinalCache;
this.categoryCache = categoryCache == null ? new LRUHashMap<Integer,CategoryPath>(DEFAULT_CACHE_VALUE) : categoryCache;
this.ordinalCache = ordinalCache == null ? new LRUHashMap<FacetLabel,Integer>(DEFAULT_CACHE_VALUE) : ordinalCache;
this.categoryCache = categoryCache == null ? new LRUHashMap<Integer,FacetLabel>(DEFAULT_CACHE_VALUE) : categoryCache;
this.taxoArrays = taxoArrays != null ? new TaxonomyIndexArrays(indexReader, taxoArrays) : null;
}
@ -103,8 +103,8 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
// These are the default cache sizes; they can be configured after
// construction with the cache's setMaxSize() method
ordinalCache = new LRUHashMap<CategoryPath, Integer>(DEFAULT_CACHE_VALUE);
categoryCache = new LRUHashMap<Integer, CategoryPath>(DEFAULT_CACHE_VALUE);
ordinalCache = new LRUHashMap<FacetLabel, Integer>(DEFAULT_CACHE_VALUE);
categoryCache = new LRUHashMap<Integer, FacetLabel>(DEFAULT_CACHE_VALUE);
}
/**
@ -122,8 +122,8 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
// These are the default cache sizes; they can be configured after
// construction with the cache's setMaxSize() method
ordinalCache = new LRUHashMap<CategoryPath, Integer>(DEFAULT_CACHE_VALUE);
categoryCache = new LRUHashMap<Integer, CategoryPath>(DEFAULT_CACHE_VALUE);
ordinalCache = new LRUHashMap<FacetLabel, Integer>(DEFAULT_CACHE_VALUE);
categoryCache = new LRUHashMap<Integer, FacetLabel>(DEFAULT_CACHE_VALUE);
}
private synchronized void initTaxoArrays() throws IOException {
@ -242,7 +242,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
}
@Override
public int getOrdinal(CategoryPath cp) throws IOException {
public int getOrdinal(FacetLabel cp) throws IOException {
ensureOpen();
if (cp.length == 0) {
return ROOT_ORDINAL;
@ -288,7 +288,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
}
@Override
public CategoryPath getPath(int ordinal) throws IOException {
public FacetLabel getPath(int ordinal) throws IOException {
ensureOpen();
// Since the cache is shared with DTR instances allocated from
@ -303,14 +303,14 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
// wrapped as LRU?
Integer catIDInteger = Integer.valueOf(ordinal);
synchronized (categoryCache) {
CategoryPath res = categoryCache.get(catIDInteger);
FacetLabel res = categoryCache.get(catIDInteger);
if (res != null) {
return res;
}
}
StoredDocument doc = indexReader.document(ordinal);
CategoryPath ret = new CategoryPath(doc.get(Consts.FULL), delimiter);
FacetLabel ret = new FacetLabel(doc.get(Consts.FULL), delimiter);
synchronized (categoryCache) {
categoryCache.put(catIDInteger, ret);
}
@ -326,7 +326,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
/**
* setCacheSize controls the maximum allowed size of each of the caches
* used by {@link #getPath(int)} and {@link #getOrdinal(CategoryPath)}.
* used by {@link #getPath(int)} and {@link #getOrdinal(FacetLabel)}.
* <P>
* Currently, if the given size is smaller than the current size of
 * a cache, it will not shrink, and rather will be limited to its current
@ -364,7 +364,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
int upperl = Math.min(max, indexReader.maxDoc());
for (int i = 0; i < upperl; i++) {
try {
CategoryPath category = this.getPath(i);
FacetLabel category = this.getPath(i);
if (category == null) {
sb.append(i + ": NULL!! \n");
continue;

View File

@ -21,7 +21,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
@ -248,7 +248,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
cacheIsComplete = true;
// Make sure that the taxonomy always contain the root category
// with category id 0.
addCategory(CategoryPath.EMPTY);
addCategory(FacetLabel.EMPTY);
} else {
// There are some categories on the disk, which we have not yet
// read into the cache, and therefore the cache is incomplete.
@ -388,7 +388,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
* returning the category's ordinal, or a negative number in case the
* category does not yet exist in the taxonomy.
*/
protected synchronized int findCategory(CategoryPath categoryPath) throws IOException {
protected synchronized int findCategory(FacetLabel categoryPath) throws IOException {
// If we can find the category in the cache, or we know the cache is
// complete, we can return the response directly from it
int res = cache.get(categoryPath);
@ -447,7 +447,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
}
@Override
public int addCategory(CategoryPath categoryPath) throws IOException {
public int addCategory(FacetLabel categoryPath) throws IOException {
ensureOpen();
// check the cache outside the synchronized block. this results in better
// concurrency when categories are there.
@ -479,14 +479,14 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
* parent is always added to the taxonomy before its child). We do this by
* recursion.
*/
private int internalAddCategory(CategoryPath cp) throws IOException {
private int internalAddCategory(FacetLabel cp) throws IOException {
// Find our parent's ordinal (recursively adding the parent category
// to the taxonomy if it's not already there). Then add the parent
// ordinal as payloads (rather than a stored field; payloads can be
// more efficiently read into memory in bulk by LuceneTaxonomyReader)
int parent;
if (cp.length > 1) {
CategoryPath parentPath = cp.subpath(cp.length - 1);
FacetLabel parentPath = cp.subpath(cp.length - 1);
parent = findCategory(parentPath);
if (parent < 0) {
parent = internalAddCategory(parentPath);
@ -515,7 +515,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
 * Note that the methods calling addCategoryDocument() are synchronized, so
* this method is effectively synchronized as well.
*/
private int addCategoryDocument(CategoryPath categoryPath, int parent) throws IOException {
private int addCategoryDocument(FacetLabel categoryPath, int parent) throws IOException {
// Before Lucene 2.9, position increments >=0 were supported, so we
// added 1 to parent to allow the parent -1 (the parent of the root).
// Unfortunately, starting with Lucene 2.9, after LUCENE-1542, this is
@ -596,7 +596,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
}
}
private void addToCache(CategoryPath categoryPath, int id) throws IOException {
private void addToCache(FacetLabel categoryPath, int id) throws IOException {
if (cache.put(categoryPath, id)) {
// If cache.put() returned true, it means the cache was limited in
// size, became full, and parts of it had to be evicted. It is
@ -729,7 +729,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
// hence documents), there are no deletions in the index. Therefore, it
// is sufficient to call next(), and then doc(), exactly once with no
// 'validation' checks.
CategoryPath cp = new CategoryPath(t.utf8ToString(), delimiter);
FacetLabel cp = new FacetLabel(t.utf8ToString(), delimiter);
docsEnum = termsEnum.docs(null, docsEnum, DocsEnum.FLAG_NONE);
boolean res = cache.put(cp, docsEnum.nextDoc() + ctx.docBase);
assert !res : "entries should not have been evicted from the cache";
@ -819,7 +819,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
te = terms.iterator(te);
while (te.next() != null) {
String value = te.term().utf8ToString();
CategoryPath cp = new CategoryPath(value, delimiter);
FacetLabel cp = new FacetLabel(value, delimiter);
final int ordinal = addCategory(cp);
docs = te.docs(null, docs, DocsEnum.FLAG_NONE);
ordinalMap.addMapping(docs.nextDoc() + base, ordinal);

View File

@ -1,6 +1,6 @@
package org.apache.lucene.facet.taxonomy.writercache;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
/*
@ -37,7 +37,7 @@ import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
* <p>
* However, if it does so, it should clear out large parts of the cache at once,
* because the user will typically need to work hard to recover from every cache
* cleanup (see {@link #put(CategoryPath, int)}'s return value).
* cleanup (see {@link #put(FacetLabel, int)}'s return value).
* <p>
* <b>NOTE:</b> the cache may be accessed concurrently by multiple threads,
* therefore cache implementations should take this into consideration.
@ -62,7 +62,7 @@ public interface TaxonomyWriterCache {
* it means the category does not exist. Otherwise, the category might
* still exist, but just be missing from the cache.
*/
public int get(CategoryPath categoryPath);
public int get(FacetLabel categoryPath);
/**
* Add a category to the cache, with the given ordinal as the value.
@ -82,7 +82,7 @@ public interface TaxonomyWriterCache {
* It doesn't really matter, because normally the next thing we do after
* finding that a category does not exist is to add it.
*/
public boolean put(CategoryPath categoryPath, int ordinal);
public boolean put(FacetLabel categoryPath, int ordinal);
/**
* Returns true if the cache is full, such that the next {@link #put} will

View File

@ -1,6 +1,6 @@
package org.apache.lucene.facet.taxonomy.writercache.cl2o;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -19,11 +19,11 @@ import org.apache.lucene.facet.taxonomy.CategoryPath;
* limitations under the License.
*/
/** Utilities for use of {@link CategoryPath} by {@link CompactLabelToOrdinal}. */
/** Utilities for use of {@link FacetLabel} by {@link CompactLabelToOrdinal}. */
class CategoryPathUtils {
/** Serializes the given {@link CategoryPath} to the {@link CharBlockArray}. */
public static void serialize(CategoryPath cp, CharBlockArray charBlockArray) {
/** Serializes the given {@link FacetLabel} to the {@link CharBlockArray}. */
public static void serialize(FacetLabel cp, CharBlockArray charBlockArray) {
charBlockArray.append((char) cp.length);
if (cp.length == 0) {
return;
@ -36,7 +36,7 @@ class CategoryPathUtils {
/**
* Calculates a hash function of a path that was serialized with
* {@link #serialize(CategoryPath, CharBlockArray)}.
* {@link #serialize(FacetLabel, CharBlockArray)}.
*/
public static int hashCodeOfSerialized(CharBlockArray charBlockArray, int offset) {
int length = charBlockArray.charAt(offset++);
@ -54,10 +54,10 @@ class CategoryPathUtils {
}
/**
* Check whether the {@link CategoryPath} is equal to the one serialized in
* Check whether the {@link FacetLabel} is equal to the one serialized in
* {@link CharBlockArray}.
*/
public static boolean equalsToSerialized(CategoryPath cp, CharBlockArray charBlockArray, int offset) {
public static boolean equalsToSerialized(FacetLabel cp, CharBlockArray charBlockArray, int offset) {
int n = charBlockArray.charAt(offset++);
if (cp.length != n) {
return false;

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.taxonomy.writercache.cl2o;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
/*
@ -68,7 +68,7 @@ public class Cl2oTaxonomyWriterCache implements TaxonomyWriterCache {
}
@Override
public int get(CategoryPath categoryPath) {
public int get(FacetLabel categoryPath) {
lock.readLock().lock();
try {
return cache.getOrdinal(categoryPath);
@ -78,7 +78,7 @@ public class Cl2oTaxonomyWriterCache implements TaxonomyWriterCache {
}
@Override
public boolean put(CategoryPath categoryPath, int ordinal) {
public boolean put(FacetLabel categoryPath, int ordinal) {
lock.writeLock().lock();
try {
cache.addLabel(categoryPath, ordinal);

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.taxonomy.writercache.cl2o;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -103,7 +103,7 @@ public class CollisionMap {
this.threshold = (int) (this.capacity * this.loadFactor);
}
public int get(CategoryPath label, int hash) {
public int get(FacetLabel label, int hash) {
int bucketIndex = indexFor(hash, this.capacity);
Entry e = this.entries[bucketIndex];
@ -117,7 +117,7 @@ public class CollisionMap {
return e.cid;
}
public int addLabel(CategoryPath label, int hash, int cid) {
public int addLabel(FacetLabel label, int hash, int cid) {
int bucketIndex = indexFor(hash, this.capacity);
for (Entry e = this.entries[bucketIndex]; e != null; e = e.next) {
if (e.hash == hash && CategoryPathUtils.equalsToSerialized(label, labelRepository, e.offset)) {

View File

@ -27,7 +27,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Iterator;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/**
* This is a very efficient LabelToOrdinal implementation that uses a
@ -101,7 +101,7 @@ public class CompactLabelToOrdinal extends LabelToOrdinal {
private void init() {
labelRepository = new CharBlockArray();
CategoryPathUtils.serialize(CategoryPath.EMPTY, labelRepository);
CategoryPathUtils.serialize(FacetLabel.EMPTY, labelRepository);
int c = this.capacity;
for (int i = 0; i < this.hashArrays.length; i++) {
@ -111,7 +111,7 @@ public class CompactLabelToOrdinal extends LabelToOrdinal {
}
@Override
public void addLabel(CategoryPath label, int ordinal) {
public void addLabel(FacetLabel label, int ordinal) {
if (collisionMap.size() > threshold) {
grow();
}
@ -130,7 +130,7 @@ public class CompactLabelToOrdinal extends LabelToOrdinal {
}
@Override
public int getOrdinal(CategoryPath label) {
public int getOrdinal(FacetLabel label) {
if (label == null) {
return LabelToOrdinal.INVALID_ORDINAL;
}
@ -199,7 +199,7 @@ public class CompactLabelToOrdinal extends LabelToOrdinal {
}
}
private boolean addLabel(HashArray a, CategoryPath label, int hash, int ordinal) {
private boolean addLabel(HashArray a, FacetLabel label, int hash, int ordinal) {
int index = CompactLabelToOrdinal.indexFor(hash, a.offsets.length);
int offset = a.offsets[index];
@ -243,7 +243,7 @@ public class CompactLabelToOrdinal extends LabelToOrdinal {
return false;
}
private int getOrdinal(HashArray a, CategoryPath label, int hash) {
private int getOrdinal(HashArray a, FacetLabel label, int hash) {
if (label == null) {
return LabelToOrdinal.INVALID_ORDINAL;
}
@ -280,7 +280,7 @@ public class CompactLabelToOrdinal extends LabelToOrdinal {
//
// }
static int stringHashCode(CategoryPath label) {
static int stringHashCode(FacetLabel label) {
int hash = label.hashCode();
hash = hash ^ ((hash >>> 20) ^ (hash >>> 12));

View File

@ -1,6 +1,6 @@
package org.apache.lucene.facet.taxonomy.writercache.cl2o;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -49,12 +49,12 @@ public abstract class LabelToOrdinal {
* Throws an {@link IllegalArgumentException} if the same label with
* a different ordinal was previoulsy added to this table.
*/
public abstract void addLabel(CategoryPath label, int ordinal);
public abstract void addLabel(FacetLabel label, int ordinal);
/**
* @return the ordinal assigned to the given label,
* or {@link #INVALID_ORDINAL} if the label cannot be found in this table.
*/
public abstract int getOrdinal(CategoryPath label);
public abstract int getOrdinal(FacetLabel label);
}

View File

@ -1,6 +1,6 @@
package org.apache.lucene.facet.taxonomy.writercache.lru;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.writercache.TaxonomyWriterCache;
/*
@ -77,7 +77,7 @@ public class LruTaxonomyWriterCache implements TaxonomyWriterCache {
}
@Override
public synchronized int get(CategoryPath categoryPath) {
public synchronized int get(FacetLabel categoryPath) {
Integer res = cache.get(categoryPath);
if (res == null) {
return -1;
@ -87,7 +87,7 @@ public class LruTaxonomyWriterCache implements TaxonomyWriterCache {
}
@Override
public synchronized boolean put(CategoryPath categoryPath, int ordinal) {
public synchronized boolean put(FacetLabel categoryPath, int ordinal) {
boolean ret = cache.put(categoryPath, new Integer(ordinal));
// If the cache is full, we need to clear one or more old entries
// from the cache. However, if we delete from the cache a recent

View File

@ -1,6 +1,6 @@
package org.apache.lucene.facet.taxonomy.writercache.lru;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -35,12 +35,12 @@ public class NameHashIntCacheLRU extends NameIntCacheLRU {
}
@Override
Object key(CategoryPath name) {
Object key(FacetLabel name) {
return new Long(name.longHashCode());
}
@Override
Object key(CategoryPath name, int prefixLen) {
Object key(FacetLabel name, int prefixLen) {
return new Long(name.subpath(prefixLen).longHashCode());
}

View File

@ -3,7 +3,7 @@ package org.apache.lucene.facet.taxonomy.writercache.lru;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
@ -58,7 +58,7 @@ class NameIntCacheLRU {
}
}
Integer get (CategoryPath name) {
Integer get (FacetLabel name) {
Integer res = cache.get(key(name));
if (res==null) {
nMisses ++;
@ -69,11 +69,11 @@ class NameIntCacheLRU {
}
/** Subclasses can override this to provide caching by e.g. hash of the string. */
Object key(CategoryPath name) {
Object key(FacetLabel name) {
return name;
}
Object key(CategoryPath name, int prefixLen) {
Object key(FacetLabel name, int prefixLen) {
return name.subpath(prefixLen);
}
@ -81,12 +81,12 @@ class NameIntCacheLRU {
* Add a new value to cache.
* Return true if cache became full and some room need to be made.
*/
boolean put (CategoryPath name, Integer val) {
boolean put (FacetLabel name, Integer val) {
cache.put(key(name), val);
return isCacheFull();
}
boolean put (CategoryPath name, int prefixLen, Integer val) {
boolean put (FacetLabel name, int prefixLen, Integer val) {
cache.put(key(name, prefixLen), val);
return isCacheFull();
}

View File

@ -21,7 +21,7 @@ import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyReader.ChildrenIterator;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -65,7 +65,7 @@ public class PrintTaxonomyStats {
while (chilrenIt.next() != TaxonomyReader.INVALID_ORDINAL) {
numImmediateChildren++;
}
CategoryPath cp = r.getPath(child);
FacetLabel cp = r.getPath(child);
out.println("/" + cp + ": " + numImmediateChildren + " immediate children; " + (1+countAllChildren(r, child)) + " total categories");
if (printTree) {
printAllChildren(out, r, child, " ", 1);

View File

@ -25,7 +25,7 @@ import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -115,13 +115,13 @@ public abstract class FacetTestBase extends FacetTestCase {
};
/** Facets: facets[D][F] == category-path no. F for document no. D. */
private static final CategoryPath[][] DEFAULT_CATEGORIES = {
{ new CategoryPath("root","a","f1"), new CategoryPath("root","a","f2") },
{ new CategoryPath("root","a","f1"), new CategoryPath("root","a","f3") },
private static final FacetLabel[][] DEFAULT_CATEGORIES = {
{ new FacetLabel("root","a","f1"), new FacetLabel("root","a","f2") },
{ new FacetLabel("root","a","f1"), new FacetLabel("root","a","f3") },
};
/** categories to be added to specified doc */
protected List<CategoryPath> getCategories(int doc) {
protected List<FacetLabel> getCategories(int doc) {
return Arrays.asList(DEFAULT_CATEGORIES[doc]);
}
@ -284,7 +284,7 @@ public abstract class FacetTestBase extends FacetTestCase {
/** utility Create a dummy document with specified categories and content */
protected final void indexDoc(FacetIndexingParams iParams, RandomIndexWriter iw,
TaxonomyWriter tw, String content, List<CategoryPath> categories) throws IOException {
TaxonomyWriter tw, String content, List<FacetLabel> categories) throws IOException {
Document d = new Document();
FacetFields facetFields = new FacetFields(tw, iParams);
facetFields.addFields(d, categories);
@ -293,10 +293,10 @@ public abstract class FacetTestBase extends FacetTestCase {
}
/** Build the "truth" with ALL the facets enumerating indexes content. */
protected Map<CategoryPath, Integer> facetCountsTruth() throws IOException {
protected Map<FacetLabel, Integer> facetCountsTruth() throws IOException {
FacetIndexingParams iParams = getFacetIndexingParams(Integer.MAX_VALUE);
String delim = String.valueOf(iParams.getFacetDelimChar());
Map<CategoryPath, Integer> res = new HashMap<CategoryPath, Integer>();
Map<FacetLabel, Integer> res = new HashMap<FacetLabel, Integer>();
HashSet<String> handledTerms = new HashSet<String>();
for (CategoryListParams clp : iParams.getAllCategoryListParams()) {
if (!handledTerms.add(clp.field)) {
@ -315,14 +315,14 @@ public abstract class FacetTestBase extends FacetTestCase {
while (de.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
cnt++;
}
res.put(new CategoryPath(te.term().utf8ToString().split(delim)), cnt);
res.put(new FacetLabel(te.term().utf8ToString().split(delim)), cnt);
}
}
return res;
}
/** Validate counts for returned facets, and that there are not too many results */
protected static void assertCountsAndCardinality(Map<CategoryPath, Integer> facetCountsTruth, List<FacetResult> facetResults) throws Exception {
protected static void assertCountsAndCardinality(Map<FacetLabel, Integer> facetCountsTruth, List<FacetResult> facetResults) throws Exception {
for (FacetResult fr : facetResults) {
FacetResultNode topResNode = fr.getFacetResultNode();
FacetRequest freq = fr.getFacetRequest();
@ -334,7 +334,7 @@ public abstract class FacetTestBase extends FacetTestCase {
}
/** Validate counts for returned facets, and that there are not too many results */
private static void assertCountsAndCardinality(Map<CategoryPath,Integer> facetCountsTruth, FacetResultNode resNode, int reqNumResults) throws Exception {
private static void assertCountsAndCardinality(Map<FacetLabel,Integer> facetCountsTruth, FacetResultNode resNode, int reqNumResults) throws Exception {
int actualNumResults = resNode.subResults.size();
if (VERBOSE) {
System.out.println("NumResults: " + actualNumResults);

View File

@ -9,7 +9,7 @@ import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -47,10 +47,10 @@ public class AssociationsFacetRequestTest extends FacetTestCase {
private static IndexReader reader;
private static Directory taxoDir;
private static final CategoryPath aint = new CategoryPath("int", "a");
private static final CategoryPath bint = new CategoryPath("int", "b");
private static final CategoryPath afloat = new CategoryPath("float", "a");
private static final CategoryPath bfloat = new CategoryPath("float", "b");
private static final FacetLabel aint = new FacetLabel("int", "a");
private static final FacetLabel bint = new FacetLabel("int", "b");
private static final FacetLabel afloat = new FacetLabel("float", "a");
private static final FacetLabel bfloat = new FacetLabel("float", "b");
@BeforeClass
public static void beforeClassAssociationsFacetRequestTest() throws Exception {

View File

@ -11,7 +11,7 @@ import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.ParallelAtomicReader;
@ -111,7 +111,7 @@ public class TestFacetsAccumulatorWithComplement extends FacetTestBase {
/** compute facets with certain facet requests and docs */
private List<FacetResult> findFacets(boolean withComplement) throws IOException {
FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(new CategoryPath("root","a"), 10));
FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(new FacetLabel("root","a"), 10));
OldFacetsAccumulator sfa = new OldFacetsAccumulator(fsp, indexReader, taxoReader);
sfa.setComplementThreshold(withComplement ? OldFacetsAccumulator.FORCE_COMPLEMENT : OldFacetsAccumulator.DISABLE_COMPLEMENT);
FacetsCollector fc = FacetsCollector.create(sfa);

View File

@ -12,7 +12,7 @@ import org.apache.lucene.facet.complements.TotalFacetCountsCache;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -72,7 +72,7 @@ public class TestTotalFacetCounts extends FacetTestCase {
}
@Override
public CategoryListParams getCategoryListParams(CategoryPath category) {
public CategoryListParams getCategoryListParams(FacetLabel category) {
return new CategoryListParams() {
@Override
public OrdinalPolicy getOrdinalPolicy(String dimension) {
@ -89,7 +89,7 @@ public class TestTotalFacetCounts extends FacetTestCase {
FacetFields facetFields = new FacetFields(taxoWriter, iParams);
for (String cat : categories) {
Document doc = new Document();
facetFields.addFields(doc, Collections.singletonList(new CategoryPath(cat, '/')));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel(cat, '/')));
indexWriter.addDocument(doc);
}

View File

@ -21,7 +21,7 @@ import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -91,7 +91,7 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
TaxonomyWriter tw, String... strings) throws IOException {
Document doc = new Document();
FacetFields facetFields = new FacetFields(tw, iParams);
facetFields.addFields(doc, Collections.singletonList(new CategoryPath(strings)));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel(strings)));
iw.addDocument(doc);
}
@ -129,7 +129,7 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
for (String cat : CATEGORIES) {
Document doc = new Document();
facetFields.addFields(doc, Collections.singletonList(new CategoryPath(cat, '/')));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel(cat, '/')));
indexWriter.addDocument(doc);
}
@ -173,8 +173,8 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
@Override
public void run() {
try {
FacetSearchParams fsp = new FacetSearchParams(iParams, new CountFacetRequest(new CategoryPath("a"), 10),
new CountFacetRequest(new CategoryPath("b"), 10));
FacetSearchParams fsp = new FacetSearchParams(iParams, new CountFacetRequest(new FacetLabel("a"), 10),
new CountFacetRequest(new FacetLabel("b"), 10));
IndexSearcher searcher = new IndexSearcher(indexReader);
FacetsCollector fc = FacetsCollector.create(fsp, indexReader, taxoReader);
searcher.search(new MatchAllDocsQuery(), fc);
@ -352,7 +352,7 @@ public class TestTotalFacetCountsCache extends FacetTestCase {
// Make the taxonomy grow without touching the index
for (int i = 0; i < 10; i++) {
taxoWriter.addCategory(new CategoryPath("foo", Integer.toString(i)));
taxoWriter.addCategory(new FacetLabel("foo", Integer.toString(i)));
}
taxoWriter.commit();
TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(taxoReader);

View File

@ -4,7 +4,7 @@ import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.search.DrillDownQuery;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.util.PartitionsUtils;
import org.apache.lucene.index.Term;
import org.junit.Test;
@ -33,11 +33,11 @@ public class FacetIndexingParamsTest extends FacetTestCase {
FacetIndexingParams dfip = FacetIndexingParams.DEFAULT;
assertNotNull("Missing default category list", dfip.getAllCategoryListParams());
assertEquals("all categories have the same CategoryListParams by default",
dfip.getCategoryListParams(null), dfip.getCategoryListParams(new CategoryPath("a")));
dfip.getCategoryListParams(null), dfip.getCategoryListParams(new FacetLabel("a")));
assertEquals("Expected default category list field is $facets", "$facets", dfip.getCategoryListParams(null).field);
String expectedDDText = "a"
+ dfip.getFacetDelimChar() + "b";
CategoryPath cp = new CategoryPath("a", "b");
FacetLabel cp = new FacetLabel("a", "b");
assertEquals("wrong drill-down term", new Term("$facets",
expectedDDText), DrillDownQuery.term(dfip,cp));
char[] buf = new char[20];

View File

@ -7,7 +7,7 @@ import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.PerDimensionIndexingParams;
import org.apache.lucene.facet.search.DrillDownQuery;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.util.PartitionsUtils;
import org.apache.lucene.index.Term;
import org.junit.Test;
@ -33,11 +33,11 @@ public class PerDimensionIndexingParamsTest extends FacetTestCase {
@Test
public void testTopLevelSettings() {
FacetIndexingParams ifip = new PerDimensionIndexingParams(Collections.<CategoryPath, CategoryListParams>emptyMap());
FacetIndexingParams ifip = new PerDimensionIndexingParams(Collections.<FacetLabel, CategoryListParams>emptyMap());
assertNotNull("Missing default category list", ifip.getAllCategoryListParams());
assertEquals("Expected default category list field is $facets", "$facets", ifip.getCategoryListParams(null).field);
String expectedDDText = "a" + ifip.getFacetDelimChar() + "b";
CategoryPath cp = new CategoryPath("a", "b");
FacetLabel cp = new FacetLabel("a", "b");
assertEquals("wrong drill-down term", new Term("$facets", expectedDDText), DrillDownQuery.term(ifip,cp));
char[] buf = new char[20];
int numchars = ifip.drillDownTermText(cp, buf);
@ -55,9 +55,9 @@ public class PerDimensionIndexingParamsTest extends FacetTestCase {
public void testCategoryListParamsAddition() {
CategoryListParams clp = new CategoryListParams("clp");
PerDimensionIndexingParams tlfip = new PerDimensionIndexingParams(
Collections.<CategoryPath,CategoryListParams> singletonMap(new CategoryPath("a"), clp));
Collections.<FacetLabel,CategoryListParams> singletonMap(new FacetLabel("a"), clp));
assertEquals("Expected category list field is " + clp.field,
clp.field, tlfip.getCategoryListParams(new CategoryPath("a")).field);
clp.field, tlfip.getCategoryListParams(new FacetLabel("a")).field);
assertNotSame("Unexpected default category list " + clp.field, clp, tlfip.getCategoryListParams(null));
}

View File

@ -47,7 +47,7 @@ import org.apache.lucene.facet.search.FacetsAccumulator;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetFields;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -114,11 +114,11 @@ public class TestRangeAccumulator extends FacetTestCase {
// For drill down by numeric range:
doc.add(new LongField("field", l, Field.Store.NO));
CategoryPath cp;
FacetLabel cp;
if ((l&3) == 0) {
cp = new CategoryPath("dim", "a");
cp = new FacetLabel("dim", "a");
} else {
cp = new CategoryPath("dim", "b");
cp = new FacetLabel("dim", "b");
}
ff.addFields(doc, Collections.singletonList(cp));
w.addDocument(doc);
@ -132,7 +132,7 @@ public class TestRangeAccumulator extends FacetTestCase {
IndexSearcher s = newSearcher(r);
final CountFacetRequest countRequest = new CountFacetRequest(new CategoryPath("dim"), 2);
final CountFacetRequest countRequest = new CountFacetRequest(new FacetLabel("dim"), 2);
final RangeFacetRequest<LongRange> rangeRequest = new RangeFacetRequest<LongRange>("field",
new LongRange("less than 10", 0L, true, 10L, false),
new LongRange("less than or equal to 10", 0L, true, 10L, true),
@ -183,7 +183,7 @@ public class TestRangeAccumulator extends FacetTestCase {
// Second search, drill down on dim=b:
ddq = new DrillDownQuery(FacetIndexingParams.DEFAULT, new MatchAllDocsQuery());
ddq.add(new CategoryPath("dim", "b"));
ddq.add(new FacetLabel("dim", "b"));
dimSeen.clear();
dsr = ds.search(null, ddq, 10, fsp);
@ -221,11 +221,11 @@ public class TestRangeAccumulator extends FacetTestCase {
// For drill down by numeric range:
doc.add(new LongField("field", l, Field.Store.NO));
CategoryPath cp;
FacetLabel cp;
if ((l&3) == 0) {
cp = new CategoryPath("dim", "a");
cp = new FacetLabel("dim", "a");
} else {
cp = new CategoryPath("dim", "b");
cp = new FacetLabel("dim", "b");
}
ff.addFields(doc, Collections.singletonList(cp));
w.addDocument(doc);
@ -237,7 +237,7 @@ public class TestRangeAccumulator extends FacetTestCase {
IndexSearcher s = newSearcher(r);
final SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(s.getIndexReader());
final CountFacetRequest countRequest = new CountFacetRequest(new CategoryPath("dim"), 2);
final CountFacetRequest countRequest = new CountFacetRequest(new FacetLabel("dim"), 2);
final RangeFacetRequest<LongRange> rangeRequest = new RangeFacetRequest<LongRange>("field",
new LongRange("less than 10", 0L, true, 10L, false),
new LongRange("less than or equal to 10", 0L, true, 10L, true),
@ -288,7 +288,7 @@ public class TestRangeAccumulator extends FacetTestCase {
// Second search, drill down on dim=b:
ddq = new DrillDownQuery(FacetIndexingParams.DEFAULT, new MatchAllDocsQuery());
ddq.add(new CategoryPath("dim", "b"));
ddq.add(new FacetLabel("dim", "b"));
dimSeen.clear();
dsr = ds.search(null, ddq, 10, fsp);

View File

@ -15,7 +15,7 @@ import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -63,7 +63,7 @@ public class OversampleWithDepthTest extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(indexDir);
TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir);
CountFacetRequest facetRequest = new CountFacetRequest(new CategoryPath("root"), 10);
CountFacetRequest facetRequest = new CountFacetRequest(new FacetLabel("root"), 10);
// Setting the depth to '2', should potentially get all categories
facetRequest.setDepth(2);
facetRequest.setResultMode(ResultMode.PER_NODE_IN_TREE);
@ -100,7 +100,7 @@ public class OversampleWithDepthTest extends FacetTestCase {
FacetFields facetFields = new FacetFields(tw, fip);
for (int i = 0; i < 100; i++) {
Document doc = new Document();
CategoryPath cp = new CategoryPath("root",Integer.toString(i / 10), Integer.toString(i));
FacetLabel cp = new FacetLabel("root",Integer.toString(i / 10), Integer.toString(i));
facetFields.addFields(doc, Collections.singletonList(cp));
w.addDocument(doc);
}

View File

@ -10,7 +10,7 @@ import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.junit.After;
import org.junit.Before;
@ -50,10 +50,10 @@ public class SamplerTest extends FacetTestBase {
}
@Override
protected List<CategoryPath> getCategories(final int doc) {
return new ArrayList<CategoryPath>() {
protected List<FacetLabel> getCategories(final int doc) {
return new ArrayList<FacetLabel>() {
{
add(new CategoryPath("root", "a", Integer.toString(doc % 10)));
add(new FacetLabel("root", "a", Integer.toString(doc % 10)));
}
};
}
@ -93,7 +93,7 @@ public class SamplerTest extends FacetTestBase {
assertNull("Fixer should be null as the test is for no-fixing",
sp.getSampleFixer());
FacetSearchParams fsp = new FacetSearchParams(fip, new CountFacetRequest(
new CategoryPath("root", "a"), 1));
new FacetLabel("root", "a"), 1));
SamplingAccumulator accumulator = new SamplingAccumulator(
new RandomSampler(sp, random()), fsp, indexReader, taxoReader);

View File

@ -13,7 +13,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.facet.FacetTestBase;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
/*
@ -78,9 +78,9 @@ public abstract class BaseTestTopK extends FacetTestBase {
}
@Override
protected List<CategoryPath> getCategories(int doc) {
protected List<FacetLabel> getCategories(int doc) {
nextInt(doc);
CategoryPath cp = new CategoryPath(
FacetLabel cp = new FacetLabel(
"a",
Integer.toString(nextInt / 1000),
Integer.toString(nextInt / 100),
@ -93,10 +93,10 @@ public abstract class BaseTestTopK extends FacetTestBase {
protected FacetSearchParams searchParamsWithRequests(int numResults, FacetIndexingParams fip) {
List<FacetRequest> facetRequests = new ArrayList<FacetRequest>();
facetRequests.add(new CountFacetRequest(new CategoryPath("a"), numResults));
facetRequests.add(new CountFacetRequest(new CategoryPath("a", "1"), numResults));
facetRequests.add(new CountFacetRequest(new CategoryPath("a", "1", "10"), numResults));
facetRequests.add(new CountFacetRequest(new CategoryPath("a", "2", "26", "267"), numResults));
facetRequests.add(new CountFacetRequest(new FacetLabel("a"), numResults));
facetRequests.add(new CountFacetRequest(new FacetLabel("a", "1"), numResults));
facetRequests.add(new CountFacetRequest(new FacetLabel("a", "1", "10"), numResults));
facetRequests.add(new CountFacetRequest(new FacetLabel("a", "2", "26", "267"), numResults));
return getFacetSearchParams(facetRequests, fip);
}

View File

@ -21,7 +21,7 @@ import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.params.PerDimensionOrdinalPolicy;
import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -61,38 +61,38 @@ import org.junit.Test;
public class CountingFacetsAggregatorTest extends FacetTestCase {
private static final Term A = new Term("f", "a");
private static final CategoryPath CP_A = new CategoryPath("A"), CP_B = new CategoryPath("B");
private static final CategoryPath CP_C = new CategoryPath("C"), CP_D = new CategoryPath("D"); // indexed w/ NO_PARENTS
private static final FacetLabel CP_A = new FacetLabel("A"), CP_B = new FacetLabel("B");
private static final FacetLabel CP_C = new FacetLabel("C"), CP_D = new FacetLabel("D"); // indexed w/ NO_PARENTS
private static final int NUM_CHILDREN_CP_A = 5, NUM_CHILDREN_CP_B = 3;
private static final int NUM_CHILDREN_CP_C = 5, NUM_CHILDREN_CP_D = 5;
private static final CategoryPath[] CATEGORIES_A, CATEGORIES_B;
private static final CategoryPath[] CATEGORIES_C, CATEGORIES_D;
private static final FacetLabel[] CATEGORIES_A, CATEGORIES_B;
private static final FacetLabel[] CATEGORIES_C, CATEGORIES_D;
static {
CATEGORIES_A = new CategoryPath[NUM_CHILDREN_CP_A];
CATEGORIES_A = new FacetLabel[NUM_CHILDREN_CP_A];
for (int i = 0; i < NUM_CHILDREN_CP_A; i++) {
CATEGORIES_A[i] = new CategoryPath(CP_A.components[0], Integer.toString(i));
CATEGORIES_A[i] = new FacetLabel(CP_A.components[0], Integer.toString(i));
}
CATEGORIES_B = new CategoryPath[NUM_CHILDREN_CP_B];
CATEGORIES_B = new FacetLabel[NUM_CHILDREN_CP_B];
for (int i = 0; i < NUM_CHILDREN_CP_B; i++) {
CATEGORIES_B[i] = new CategoryPath(CP_B.components[0], Integer.toString(i));
CATEGORIES_B[i] = new FacetLabel(CP_B.components[0], Integer.toString(i));
}
// NO_PARENTS categories
CATEGORIES_C = new CategoryPath[NUM_CHILDREN_CP_C];
CATEGORIES_C = new FacetLabel[NUM_CHILDREN_CP_C];
for (int i = 0; i < NUM_CHILDREN_CP_C; i++) {
CATEGORIES_C[i] = new CategoryPath(CP_C.components[0], Integer.toString(i));
CATEGORIES_C[i] = new FacetLabel(CP_C.components[0], Integer.toString(i));
}
// Multi-level categories
CATEGORIES_D = new CategoryPath[NUM_CHILDREN_CP_D];
CATEGORIES_D = new FacetLabel[NUM_CHILDREN_CP_D];
for (int i = 0; i < NUM_CHILDREN_CP_D; i++) {
String val = Integer.toString(i);
CATEGORIES_D[i] = new CategoryPath(CP_D.components[0], val, val + val); // e.g. D/1/11, D/2/22...
CATEGORIES_D[i] = new FacetLabel(CP_D.components[0], val, val + val); // e.g. D/1/11, D/2/22...
}
}
private static Directory indexDir, taxoDir;
private static ObjectToIntMap<CategoryPath> allExpectedCounts, termExpectedCounts;
private static ObjectToIntMap<FacetLabel> allExpectedCounts, termExpectedCounts;
private static FacetIndexingParams fip;
@AfterClass
@ -100,19 +100,19 @@ public class CountingFacetsAggregatorTest extends FacetTestCase {
IOUtils.close(indexDir, taxoDir);
}
private static List<CategoryPath> randomCategories(Random random) {
private static List<FacetLabel> randomCategories(Random random) {
// add random categories from the two dimensions, ensuring that the same
// category is not added twice.
int numFacetsA = random.nextInt(3) + 1; // 1-3
int numFacetsB = random.nextInt(2) + 1; // 1-2
ArrayList<CategoryPath> categories_a = new ArrayList<CategoryPath>();
ArrayList<FacetLabel> categories_a = new ArrayList<FacetLabel>();
categories_a.addAll(Arrays.asList(CATEGORIES_A));
ArrayList<CategoryPath> categories_b = new ArrayList<CategoryPath>();
ArrayList<FacetLabel> categories_b = new ArrayList<FacetLabel>();
categories_b.addAll(Arrays.asList(CATEGORIES_B));
Collections.shuffle(categories_a, random);
Collections.shuffle(categories_b, random);
ArrayList<CategoryPath> categories = new ArrayList<CategoryPath>();
ArrayList<FacetLabel> categories = new ArrayList<FacetLabel>();
categories.addAll(categories_a.subList(0, numFacetsA));
categories.addAll(categories_b.subList(0, numFacetsB));
@ -129,8 +129,8 @@ public class CountingFacetsAggregatorTest extends FacetTestCase {
private static void addFacets(Document doc, FacetFields facetFields, boolean updateTermExpectedCounts)
throws IOException {
List<CategoryPath> docCategories = randomCategories(random());
for (CategoryPath cp : docCategories) {
List<FacetLabel> docCategories = randomCategories(random());
for (FacetLabel cp : docCategories) {
if (cp.components[0].equals(CP_D.components[0])) {
cp = cp.subpath(2); // we'll get counts for the 2nd level only
}
@ -163,7 +163,7 @@ public class CountingFacetsAggregatorTest extends FacetTestCase {
}
private static void indexDocsWithFacetsNoTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter,
ObjectToIntMap<CategoryPath> expectedCounts) throws IOException {
ObjectToIntMap<FacetLabel> expectedCounts) throws IOException {
Random random = random();
int numDocs = atLeast(random, 2);
FacetFields facetFields = new FacetFields(taxoWriter, fip);
@ -176,7 +176,7 @@ public class CountingFacetsAggregatorTest extends FacetTestCase {
}
private static void indexDocsWithFacetsAndTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter,
ObjectToIntMap<CategoryPath> expectedCounts) throws IOException {
ObjectToIntMap<FacetLabel> expectedCounts) throws IOException {
Random random = random();
int numDocs = atLeast(random, 2);
FacetFields facetFields = new FacetFields(taxoWriter, fip);
@ -190,7 +190,7 @@ public class CountingFacetsAggregatorTest extends FacetTestCase {
}
private static void indexDocsWithFacetsAndSomeTerms(IndexWriter indexWriter, TaxonomyWriter taxoWriter,
ObjectToIntMap<CategoryPath> expectedCounts) throws IOException {
ObjectToIntMap<FacetLabel> expectedCounts) throws IOException {
Random random = random();
int numDocs = atLeast(random, 2);
FacetFields facetFields = new FacetFields(taxoWriter, fip);
@ -207,22 +207,22 @@ public class CountingFacetsAggregatorTest extends FacetTestCase {
}
// initialize expectedCounts w/ 0 for all categories
private static ObjectToIntMap<CategoryPath> newCounts() {
ObjectToIntMap<CategoryPath> counts = new ObjectToIntMap<CategoryPath>();
private static ObjectToIntMap<FacetLabel> newCounts() {
ObjectToIntMap<FacetLabel> counts = new ObjectToIntMap<FacetLabel>();
counts.put(CP_A, 0);
counts.put(CP_B, 0);
counts.put(CP_C, 0);
counts.put(CP_D, 0);
for (CategoryPath cp : CATEGORIES_A) {
for (FacetLabel cp : CATEGORIES_A) {
counts.put(cp, 0);
}
for (CategoryPath cp : CATEGORIES_B) {
for (FacetLabel cp : CATEGORIES_B) {
counts.put(cp, 0);
}
for (CategoryPath cp : CATEGORIES_C) {
for (FacetLabel cp : CATEGORIES_C) {
counts.put(cp, 0);
}
for (CategoryPath cp : CATEGORIES_D) {
for (FacetLabel cp : CATEGORIES_D) {
counts.put(cp.subpath(2), 0);
}
return counts;

View File

@ -33,7 +33,7 @@ import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.PerDimensionIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -83,7 +83,7 @@ public class DrillDownQueryTest extends FacetTestCase {
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
for (int i = 0; i < 100; i++) {
ArrayList<CategoryPath> paths = new ArrayList<CategoryPath>();
ArrayList<FacetLabel> paths = new ArrayList<FacetLabel>();
Document doc = new Document();
if (i % 2 == 0) { // 50
doc.add(new TextField("content", "foo", Field.Store.NO));
@ -93,13 +93,13 @@ public class DrillDownQueryTest extends FacetTestCase {
}
if (i % 4 == 0) { // 25
if (r.nextBoolean()) {
paths.add(new CategoryPath("a/1", '/'));
paths.add(new FacetLabel("a/1", '/'));
} else {
paths.add(new CategoryPath("a/2", '/'));
paths.add(new FacetLabel("a/2", '/'));
}
}
if (i % 5 == 0) { // 20
paths.add(new CategoryPath("b"));
paths.add(new FacetLabel("b"));
}
FacetFields facetFields = new FacetFields(taxoWriter);
if (paths.size() > 0) {
@ -116,9 +116,9 @@ public class DrillDownQueryTest extends FacetTestCase {
}
public DrillDownQueryTest() {
Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
paramsMap.put(new CategoryPath("a"), randomCategoryListParams("testing_facets_a"));
paramsMap.put(new CategoryPath("b"), randomCategoryListParams("testing_facets_b"));
Map<FacetLabel,CategoryListParams> paramsMap = new HashMap<FacetLabel,CategoryListParams>();
paramsMap.put(new FacetLabel("a"), randomCategoryListParams("testing_facets_a"));
paramsMap.put(new FacetLabel("b"), randomCategoryListParams("testing_facets_b"));
nonDefaultParams = new PerDimensionIndexingParams(paramsMap);
defaultParams = new FacetIndexingParams(randomCategoryListParams(CategoryListParams.DEFAULT_FIELD));
}
@ -127,10 +127,10 @@ public class DrillDownQueryTest extends FacetTestCase {
public void testDefaultField() {
String defaultField = CategoryListParams.DEFAULT_FIELD;
Term termA = DrillDownQuery.term(defaultParams, new CategoryPath("a"));
Term termA = DrillDownQuery.term(defaultParams, new FacetLabel("a"));
assertEquals(new Term(defaultField, "a"), termA);
Term termB = DrillDownQuery.term(defaultParams, new CategoryPath("b"));
Term termB = DrillDownQuery.term(defaultParams, new FacetLabel("b"));
assertEquals(new Term(defaultField, "b"), termB);
}
@ -140,8 +140,8 @@ public class DrillDownQueryTest extends FacetTestCase {
// test (a/1 OR a/2) AND b
DrillDownQuery q = new DrillDownQuery(defaultParams);
q.add(new CategoryPath("a/1", '/'), new CategoryPath("a/2", '/'));
q.add(new CategoryPath("b"));
q.add(new FacetLabel("a/1", '/'), new FacetLabel("a/2", '/'));
q.add(new FacetLabel("b"));
TopDocs docs = searcher.search(q, 100);
assertEquals(5, docs.totalHits);
}
@ -152,7 +152,7 @@ public class DrillDownQueryTest extends FacetTestCase {
// Making sure the query yields 25 documents with the facet "a"
DrillDownQuery q = new DrillDownQuery(defaultParams);
q.add(new CategoryPath("a"));
q.add(new FacetLabel("a"));
QueryUtils.check(q);
TopDocs docs = searcher.search(q, 100);
assertEquals(25, docs.totalHits);
@ -160,14 +160,14 @@ public class DrillDownQueryTest extends FacetTestCase {
// Making sure the query yields 5 documents with the facet "b" and the
// previous (facet "a") query as a base query
DrillDownQuery q2 = new DrillDownQuery(defaultParams, q);
q2.add(new CategoryPath("b"));
q2.add(new FacetLabel("b"));
docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
// Making sure that a query of both facet "a" and facet "b" yields 5 results
DrillDownQuery q3 = new DrillDownQuery(defaultParams);
q3.add(new CategoryPath("a"));
q3.add(new CategoryPath("b"));
q3.add(new FacetLabel("a"));
q3.add(new FacetLabel("b"));
docs = searcher.search(q3, 100);
assertEquals(5, docs.totalHits);
@ -175,7 +175,7 @@ public class DrillDownQueryTest extends FacetTestCase {
// would gather together 10 results (10%..)
Query fooQuery = new TermQuery(new Term("content", "foo"));
DrillDownQuery q4 = new DrillDownQuery(defaultParams, fooQuery);
q4.add(new CategoryPath("b"));
q4.add(new FacetLabel("b"));
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
}
@ -186,12 +186,12 @@ public class DrillDownQueryTest extends FacetTestCase {
// Create the base query to start with
DrillDownQuery q = new DrillDownQuery(defaultParams);
q.add(new CategoryPath("a"));
q.add(new FacetLabel("a"));
// Making sure the query yields 5 documents with the facet "b" and the
// previous (facet "a") query as a base query
DrillDownQuery q2 = new DrillDownQuery(defaultParams, q);
q2.add(new CategoryPath("b"));
q2.add(new FacetLabel("b"));
TopDocs docs = searcher.search(q2, 100);
assertEquals(5, docs.totalHits);
@ -199,7 +199,7 @@ public class DrillDownQueryTest extends FacetTestCase {
// would gather together 10 results (10%..)
Query fooQuery = new TermQuery(new Term("content", "foo"));
DrillDownQuery q4 = new DrillDownQuery(defaultParams, fooQuery);
q4.add(new CategoryPath("b"));
q4.add(new FacetLabel("b"));
docs = searcher.search(q4, 100);
assertEquals(10, docs.totalHits);
}
@ -219,7 +219,7 @@ public class DrillDownQueryTest extends FacetTestCase {
// create a drill-down query with category "a", scores should not change
DrillDownQuery q2 = new DrillDownQuery(defaultParams, q);
q2.add(new CategoryPath("a"));
q2.add(new FacetLabel("a"));
docs = searcher.search(q2, reader.maxDoc()); // fetch all available docs to this query
for (ScoreDoc sd : docs.scoreDocs) {
assertEquals("score of doc=" + sd.doc + " modified", scores[sd.doc], sd.score, 0f);
@ -232,7 +232,7 @@ public class DrillDownQueryTest extends FacetTestCase {
IndexSearcher searcher = newSearcher(reader);
DrillDownQuery q = new DrillDownQuery(defaultParams);
q.add(new CategoryPath("a"));
q.add(new FacetLabel("a"));
TopDocs docs = searcher.search(q, reader.maxDoc()); // fetch all available docs to this query
for (ScoreDoc sd : docs.scoreDocs) {
assertEquals(0f, sd.score, 0f);
@ -241,20 +241,20 @@ public class DrillDownQueryTest extends FacetTestCase {
@Test
public void testTermNonDefault() {
Term termA = DrillDownQuery.term(nonDefaultParams, new CategoryPath("a"));
Term termA = DrillDownQuery.term(nonDefaultParams, new FacetLabel("a"));
assertEquals(new Term("testing_facets_a", "a"), termA);
Term termB = DrillDownQuery.term(nonDefaultParams, new CategoryPath("b"));
Term termB = DrillDownQuery.term(nonDefaultParams, new FacetLabel("b"));
assertEquals(new Term("testing_facets_b", "b"), termB);
}
@Test
public void testClone() throws Exception {
DrillDownQuery q = new DrillDownQuery(defaultParams, new MatchAllDocsQuery());
q.add(new CategoryPath("a"));
q.add(new FacetLabel("a"));
DrillDownQuery clone = q.clone();
clone.add(new CategoryPath("b"));
clone.add(new FacetLabel("b"));
assertFalse("query wasn't cloned: source=" + q + " clone=" + clone, q.toString().equals(clone.toString()));
}

View File

@ -2,7 +2,7 @@ package org.apache.lucene.facet.search;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.search.CountFacetRequest;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.junit.Test;
/*
@ -26,7 +26,7 @@ public class FacetRequestTest extends FacetTestCase {
@Test(expected=IllegalArgumentException.class)
public void testIllegalNumResults() throws Exception {
assertNotNull(new CountFacetRequest(new CategoryPath("a", "b"), 0));
assertNotNull(new CountFacetRequest(new FacetLabel("a", "b"), 0));
}
@Test(expected=IllegalArgumentException.class)
@ -36,8 +36,8 @@ public class FacetRequestTest extends FacetTestCase {
@Test
public void testHashAndEquals() {
CountFacetRequest fr1 = new CountFacetRequest(new CategoryPath("a"), 8);
CountFacetRequest fr2 = new CountFacetRequest(new CategoryPath("a"), 8);
CountFacetRequest fr1 = new CountFacetRequest(new FacetLabel("a"), 8);
CountFacetRequest fr2 = new CountFacetRequest(new FacetLabel("a"), 8);
assertEquals("hashCode() should agree on both objects", fr1.hashCode(), fr2.hashCode());
assertTrue("equals() should return true", fr1.equals(fr2));
fr1.setDepth(10);

View File

@ -32,7 +32,7 @@ import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.DrillSideways.DrillSidewaysResult;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -51,9 +51,9 @@ public class FacetResultTest extends FacetTestCase {
private Document newDocument(FacetFields facetFields, String... categories) throws IOException {
Document doc = new Document();
List<CategoryPath> cats = new ArrayList<CategoryPath>();
List<FacetLabel> cats = new ArrayList<FacetLabel>();
for (String cat : categories) {
cats.add(new CategoryPath(cat, '/'));
cats.add(new FacetLabel(cat, '/'));
}
facetFields.addFields(doc, cats);
return doc;
@ -80,22 +80,22 @@ public class FacetResultTest extends FacetTestCase {
private void searchIndex(TaxonomyReader taxoReader, IndexSearcher searcher, boolean fillMissingCounts, String[] exp,
String[][] drillDowns, int[] numResults) throws IOException {
CategoryPath[][] cps = new CategoryPath[drillDowns.length][];
FacetLabel[][] cps = new FacetLabel[drillDowns.length][];
for (int i = 0; i < cps.length; i++) {
cps[i] = new CategoryPath[drillDowns[i].length];
cps[i] = new FacetLabel[drillDowns[i].length];
for (int j = 0; j < cps[i].length; j++) {
cps[i][j] = new CategoryPath(drillDowns[i][j], '/');
cps[i][j] = new FacetLabel(drillDowns[i][j], '/');
}
}
DrillDownQuery ddq = new DrillDownQuery(FacetIndexingParams.DEFAULT, new MatchAllDocsQuery());
for (CategoryPath[] cats : cps) {
for (FacetLabel[] cats : cps) {
ddq.add(cats);
}
List<FacetRequest> facetRequests = new ArrayList<FacetRequest>();
for (CategoryPath[] cats : cps) {
for (FacetLabel[] cats : cps) {
for (int i = 0; i < cats.length; i++) {
CategoryPath cp = cats[i];
FacetLabel cp = cats[i];
int numres = numResults == null ? 2 : numResults[i];
// for each drill-down, add itself as well as its parent as requests, so
// we get the drill-sideways

View File

@ -12,7 +12,7 @@ import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.PerDimensionIndexingParams;
import org.apache.lucene.facet.search.CategoryListIterator;
import org.apache.lucene.facet.search.DocValuesCategoryListIterator;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -55,9 +55,9 @@ public class MultiCategoryListIteratorTest extends FacetTestCase {
}
// build the PerDimensionIndexingParams
HashMap<CategoryPath,CategoryListParams> clps = new HashMap<CategoryPath,CategoryListParams>();
HashMap<FacetLabel,CategoryListParams> clps = new HashMap<FacetLabel,CategoryListParams>();
for (String dim : dimensions) {
CategoryPath cp = new CategoryPath(dim);
FacetLabel cp = new FacetLabel(dim);
CategoryListParams clp = randomCategoryListParams("$" + dim);
clps.put(cp, clp);
}
@ -73,10 +73,10 @@ public class MultiCategoryListIteratorTest extends FacetTestCase {
for (int i = 0; i < ndocs; i++) {
Document doc = new Document();
int numCategories = random.nextInt(numDimensions) + 1;
ArrayList<CategoryPath> categories = new ArrayList<CategoryPath>();
ArrayList<FacetLabel> categories = new ArrayList<FacetLabel>();
for (int j = 0; j < numCategories; j++) {
String dimension = dimensions[random.nextInt(dimensions.length)];
categories.add(new CategoryPath(dimension, Integer.toString(i)));
categories.add(new FacetLabel(dimension, Integer.toString(i)));
}
facetFields.addFields(doc, categories);
indexWriter.addDocument(doc);
@ -88,7 +88,7 @@ public class MultiCategoryListIteratorTest extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
CategoryListIterator[] iterators = new CategoryListIterator[numDimensions];
for (int i = 0; i < iterators.length; i++) {
CategoryListParams clp = indexingParams.getCategoryListParams(new CategoryPath(dimensions[i]));
CategoryListParams clp = indexingParams.getCategoryListParams(new FacetLabel(dimensions[i]));
IntDecoder decoder = clp.createEncoder().createMatchingDecoder();
iterators[i] = new DocValuesCategoryListIterator(clp.field, decoder);
}
@ -101,7 +101,7 @@ public class MultiCategoryListIteratorTest extends FacetTestCase {
cli.getOrdinals(i, ordinals);
assertTrue("document " + i + " does not have categories", ordinals.length > 0);
for (int j = 0; j < ordinals.length; j++) {
CategoryPath cp = taxoReader.getPath(ordinals.ints[j]);
FacetLabel cp = taxoReader.getPath(ordinals.ints[j]);
assertNotNull("ordinal " + ordinals.ints[j] + " not found in taxonomy", cp);
if (cp.length == 2) {
int globalDoc = i + context.docBase;

View File

@ -8,7 +8,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader;
@ -48,10 +48,10 @@ public class OrdinalsCacheTest extends FacetTestCase {
FacetFields facetFields = new FacetFields(taxoWriter);
Document doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("A", "1")));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("A", "1")));
writer.addDocument(doc);
doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("A", "2")));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("A", "2")));
writer.addDocument(doc);
writer.close();
taxoWriter.close();
@ -64,7 +64,7 @@ public class OrdinalsCacheTest extends FacetTestCase {
public void run() {
for (AtomicReaderContext context : reader.leaves()) {
try {
OrdinalsCache.getCachedOrds(context, FacetIndexingParams.DEFAULT.getCategoryListParams(new CategoryPath("A")));
OrdinalsCache.getCachedOrds(context, FacetIndexingParams.DEFAULT.getCategoryListParams(new FacetLabel("A")));
} catch (IOException e) {
throw new RuntimeException(e);
}

View File

@ -35,7 +35,7 @@ import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -62,9 +62,9 @@ public class TestDemoFacets extends FacetTestCase {
private void add(String ... categoryPaths) throws IOException {
Document doc = new Document();
List<CategoryPath> paths = new ArrayList<CategoryPath>();
List<FacetLabel> paths = new ArrayList<FacetLabel>();
for(String categoryPath : categoryPaths) {
paths.add(new CategoryPath(categoryPath, '/'));
paths.add(new FacetLabel(categoryPath, '/'));
}
facetFields.addFields(doc, paths);
writer.addDocument(doc);
@ -99,8 +99,8 @@ public class TestDemoFacets extends FacetTestCase {
// Count both "Publish Date" and "Author" dimensions:
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
// Aggregate the facet counts:
FacetsCollector c = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
@ -121,9 +121,9 @@ public class TestDemoFacets extends FacetTestCase {
// Now user drills down on Publish Date/2010:
fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("Author"), 10));
fsp = new FacetSearchParams(new CountFacetRequest(new FacetLabel("Author"), 10));
DrillDownQuery q2 = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
q2.add(new CategoryPath("Publish Date/2010", '/'));
q2.add(new FacetLabel("Publish Date/2010", '/'));
c = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
searcher.search(q2, c);
results = c.getFacetResults();
@ -167,7 +167,7 @@ public class TestDemoFacets extends FacetTestCase {
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a/path", '/')));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("a/path", '/')));
writer.addDocument(doc);
writer.close();
taxoWriter.close();
@ -193,7 +193,7 @@ public class TestDemoFacets extends FacetTestCase {
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a/path", '/')));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("a/path", '/')));
writer.addDocument(doc);
// NRT open
@ -205,7 +205,7 @@ public class TestDemoFacets extends FacetTestCase {
taxoWriter.close();
FacetSearchParams fsp = new FacetSearchParams(fip,
new CountFacetRequest(new CategoryPath("a", '/'), 10));
new CountFacetRequest(new FacetLabel("a", '/'), 10));
// Aggregate the facet counts:
FacetsCollector c = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
@ -241,7 +241,7 @@ public class TestDemoFacets extends FacetTestCase {
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));
BytesRef br = new BytesRef(new byte[] {(byte) 0xee, (byte) 0x92, (byte) 0xaa, (byte) 0xef, (byte) 0x9d, (byte) 0x89});
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("dim/" + br.utf8ToString(), '/')));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("dim/" + br.utf8ToString(), '/')));
try {
writer.addDocument(doc);
} catch (IllegalArgumentException iae) {
@ -269,9 +269,9 @@ public class TestDemoFacets extends FacetTestCase {
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));
List<CategoryPath> paths = new ArrayList<CategoryPath>();
List<FacetLabel> paths = new ArrayList<FacetLabel>();
for (int i = 0; i < numLabels; i++) {
paths.add(new CategoryPath("dim", "" + i));
paths.add(new FacetLabel("dim", "" + i));
}
facetFields.addFields(doc, paths);
writer.addDocument(doc);
@ -284,7 +284,7 @@ public class TestDemoFacets extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
taxoWriter.close();
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("dim"), Integer.MAX_VALUE));
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new FacetLabel("dim"), Integer.MAX_VALUE));
// Aggregate the facet counts:
FacetsCollector c = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);

View File

@ -39,7 +39,7 @@ import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.DrillSideways.DrillSidewaysResult;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesFacetFields;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -79,9 +79,9 @@ public class TestDrillSideways extends FacetTestCase {
private void add(String ... categoryPaths) throws IOException {
Document doc = new Document();
List<CategoryPath> paths = new ArrayList<CategoryPath>();
List<FacetLabel> paths = new ArrayList<FacetLabel>();
for(String categoryPath : categoryPaths) {
paths.add(new CategoryPath(categoryPath, '/'));
paths.add(new FacetLabel(categoryPath, '/'));
}
facetFields.addFields(doc, paths);
writer.addDocument(doc);
@ -119,8 +119,8 @@ public class TestDrillSideways extends FacetTestCase {
// Count both "Publish Date" and "Author" dimensions, in
// drill-down:
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
DrillSideways ds = new DrillSideways(searcher, taxoReader);
@ -128,7 +128,7 @@ public class TestDrillSideways extends FacetTestCase {
// case the drill-sideways + drill-down counts ==
// drill-down of just the query:
DrillDownQuery ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Author", "Lisa"));
ddq.add(new FacetLabel("Author", "Lisa"));
DrillSidewaysResult r = ds.search(null, ddq, 10, fsp);
assertEquals(2, r.hits.totalHits);
@ -146,7 +146,7 @@ public class TestDrillSideways extends FacetTestCase {
// drill-sideways + drill-down counts == drill-down of
// just the query:
ddq = new DrillDownQuery(fsp.indexingParams);
ddq.add(new CategoryPath("Author", "Lisa"));
ddq.add(new FacetLabel("Author", "Lisa"));
r = ds.search(null, ddq, 10, fsp);
assertEquals(2, r.hits.totalHits);
@ -165,7 +165,7 @@ public class TestDrillSideways extends FacetTestCase {
// Another simple case: drill-down on on single fields
// but OR of two values
ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Author", "Lisa"), new CategoryPath("Author", "Bob"));
ddq.add(new FacetLabel("Author", "Lisa"), new FacetLabel("Author", "Bob"));
r = ds.search(null, ddq, 10, fsp);
assertEquals(3, r.hits.totalHits);
assertEquals(2, r.facetResults.size());
@ -179,8 +179,8 @@ public class TestDrillSideways extends FacetTestCase {
// More interesting case: drill-down on two fields
ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Author", "Lisa"));
ddq.add(new CategoryPath("Publish Date", "2010"));
ddq.add(new FacetLabel("Author", "Lisa"));
ddq.add(new FacetLabel("Publish Date", "2010"));
r = ds.search(null, ddq, 10, fsp);
assertEquals(1, r.hits.totalHits);
assertEquals(2, r.facetResults.size());
@ -196,9 +196,9 @@ public class TestDrillSideways extends FacetTestCase {
ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
// Drill down on Lisa or Bob:
ddq.add(new CategoryPath("Author", "Lisa"),
new CategoryPath("Author", "Bob"));
ddq.add(new CategoryPath("Publish Date", "2010"));
ddq.add(new FacetLabel("Author", "Lisa"),
new FacetLabel("Author", "Bob"));
ddq.add(new FacetLabel("Publish Date", "2010"));
r = ds.search(null, ddq, 10, fsp);
assertEquals(2, r.hits.totalHits);
assertEquals(2, r.facetResults.size());
@ -211,10 +211,10 @@ public class TestDrillSideways extends FacetTestCase {
// Test drilling down on invalid field:
ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Foobar", "Baz"));
ddq.add(new FacetLabel("Foobar", "Baz"));
fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Foobar"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Foobar"), 10));
r = ds.search(null, ddq, 10, fsp);
assertEquals(0, r.hits.totalHits);
assertEquals(2, r.facetResults.size());
@ -223,11 +223,11 @@ public class TestDrillSideways extends FacetTestCase {
// Test drilling down on valid term or'd with invalid term:
ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Author", "Lisa"),
new CategoryPath("Author", "Tom"));
ddq.add(new FacetLabel("Author", "Lisa"),
new FacetLabel("Author", "Tom"));
fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
r = ds.search(null, ddq, 10, fsp);
assertEquals(2, r.hits.totalHits);
assertEquals(2, r.facetResults.size());
@ -242,10 +242,10 @@ public class TestDrillSideways extends FacetTestCase {
// LUCENE-4915: test drilling down on a dimension but
// NOT facet counting it:
ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Author", "Lisa"),
new CategoryPath("Author", "Tom"));
ddq.add(new FacetLabel("Author", "Lisa"),
new FacetLabel("Author", "Tom"));
fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10));
r = ds.search(null, ddq, 10, fsp);
assertEquals(2, r.hits.totalHits);
assertEquals(1, r.facetResults.size());
@ -255,10 +255,10 @@ public class TestDrillSideways extends FacetTestCase {
// Test main query gets null scorer:
fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
ddq = new DrillDownQuery(fsp.indexingParams, new TermQuery(new Term("foobar", "baz")));
ddq.add(new CategoryPath("Author", "Lisa"));
ddq.add(new FacetLabel("Author", "Lisa"));
r = ds.search(null, ddq, 10, fsp);
assertEquals(0, r.hits.totalHits);
@ -304,11 +304,11 @@ public class TestDrillSideways extends FacetTestCase {
// Count both "Publish Date" and "Author" dimensions, in
// drill-down:
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
new CountFacetRequest(new FacetLabel("Publish Date"), 10),
new CountFacetRequest(new FacetLabel("Author"), 10));
DrillDownQuery ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Author", "Lisa"));
ddq.add(new FacetLabel("Author", "Lisa"));
DrillSidewaysResult r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
assertEquals(1, r.hits.totalHits);
@ -403,11 +403,11 @@ public class TestDrillSideways extends FacetTestCase {
// Two requests against the same dim:
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("dim"), 10),
new CountFacetRequest(new CategoryPath("dim", "a"), 10));
new CountFacetRequest(new FacetLabel("dim"), 10),
new CountFacetRequest(new FacetLabel("dim", "a"), 10));
DrillDownQuery ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("dim", "a"));
ddq.add(new FacetLabel("dim", "a"));
DrillSidewaysResult r = new DrillSideways(searcher, taxoReader).search(null, ddq, 10, fsp);
assertEquals(3, r.hits.totalHits);
@ -532,7 +532,7 @@ public class TestDrillSideways extends FacetTestCase {
Document doc = new Document();
doc.add(newStringField("id", rawDoc.id, Field.Store.YES));
doc.add(newStringField("content", rawDoc.contentToken, Field.Store.NO));
List<CategoryPath> paths = new ArrayList<CategoryPath>();
List<FacetLabel> paths = new ArrayList<FacetLabel>();
if (VERBOSE) {
System.out.println(" doc id=" + rawDoc.id + " token=" + rawDoc.contentToken);
@ -540,7 +540,7 @@ public class TestDrillSideways extends FacetTestCase {
for(int dim=0;dim<numDims;dim++) {
int dimValue = rawDoc.dims[dim];
if (dimValue != -1) {
CategoryPath cp = new CategoryPath("dim" + dim, dimValues[dim][dimValue]);
FacetLabel cp = new FacetLabel("dim" + dim, dimValues[dim][dimValue]);
paths.add(cp);
doc.add(new StringField("dim" + dim, dimValues[dim][dimValue], Field.Store.YES));
if (VERBOSE) {
@ -549,7 +549,7 @@ public class TestDrillSideways extends FacetTestCase {
}
int dimValue2 = rawDoc.dims2[dim];
if (dimValue2 != -1) {
CategoryPath cp = new CategoryPath("dim" + dim, dimValues[dim][dimValue2]);
FacetLabel cp = new FacetLabel("dim" + dim, dimValues[dim][dimValue2]);
paths.add(cp);
doc.add(new StringField("dim" + dim, dimValues[dim][dimValue2], Field.Store.YES));
if (VERBOSE) {
@ -632,7 +632,7 @@ public class TestDrillSideways extends FacetTestCase {
if (VERBOSE) {
System.out.println(" do facet request on dim=" + i);
}
requests.add(new CountFacetRequest(new CategoryPath("dim" + i), dimValues[numDims-1].length));
requests.add(new CountFacetRequest(new FacetLabel("dim" + i), dimValues[numDims-1].length));
} else {
if (VERBOSE) {
System.out.println(" skip facet request on dim=" + i);
@ -696,10 +696,10 @@ public class TestDrillSideways extends FacetTestCase {
for(int dim=0;dim<numDims;dim++) {
if (drillDowns[dim] != null) {
CategoryPath[] paths = new CategoryPath[drillDowns[dim].length];
FacetLabel[] paths = new FacetLabel[drillDowns[dim].length];
int upto = 0;
for(String value : drillDowns[dim]) {
paths[upto++] = new CategoryPath("dim" + dim, value);
paths[upto++] = new FacetLabel("dim" + dim, value);
}
ddq.add(paths);
}
@ -1152,11 +1152,11 @@ public class TestDrillSideways extends FacetTestCase {
taxoWriter.close();
// Count "Author"
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("Author"), 10));
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new FacetLabel("Author"), 10));
DrillSideways ds = new DrillSideways(searcher, taxoReader);
DrillDownQuery ddq = new DrillDownQuery(fsp.indexingParams, new MatchAllDocsQuery());
ddq.add(new CategoryPath("Author", "Lisa"));
ddq.add(new FacetLabel("Author", "Lisa"));
DrillSidewaysResult r = ds.search(null, ddq, 10, fsp); // this used to fail on IllegalArgEx
assertEquals(0, r.hits.totalHits);

View File

@ -29,7 +29,7 @@ import org.apache.lucene.facet.sampling.TakmiSampleFixer;
import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesAccumulator;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -79,7 +79,7 @@ public class TestFacetsCollector extends FacetTestCase {
if (random().nextBoolean()) { // don't match all documents
doc.add(new StringField("f", "v", Store.NO));
}
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a")));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("a")));
iw.addDocument(doc);
}
@ -89,7 +89,7 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(indexDir);
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
FacetSearchParams fsp = new FacetSearchParams(new SumScoreFacetRequest(new CategoryPath("a"), 10));
FacetSearchParams fsp = new FacetSearchParams(new SumScoreFacetRequest(new FacetLabel("a"), 10));
FacetsCollector fc = FacetsCollector.create(fsp, r, taxo);
TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
ConstantScoreQuery csq = new ConstantScoreQuery(new MatchAllDocsQuery());
@ -113,15 +113,15 @@ public class TestFacetsCollector extends FacetTestCase {
TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
FacetIndexingParams fip = new PerDimensionIndexingParams(Collections.singletonMap(new CategoryPath("b"), new CategoryListParams("$b")));
FacetIndexingParams fip = new PerDimensionIndexingParams(Collections.singletonMap(new FacetLabel("b"), new CategoryListParams("$b")));
FacetFields facetFields = new FacetFields(taxonomyWriter, fip);
for(int i = atLeast(30); i > 0; --i) {
Document doc = new Document();
doc.add(new StringField("f", "v", Store.NO));
List<CategoryPath> cats = new ArrayList<CategoryPath>();
cats.add(new CategoryPath("a"));
cats.add(new CategoryPath("b"));
List<FacetLabel> cats = new ArrayList<FacetLabel>();
cats.add(new FacetLabel("a"));
cats.add(new FacetLabel("b"));
facetFields.addFields(doc, cats);
iw.addDocument(doc);
}
@ -133,8 +133,8 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
FacetSearchParams sParams = new FacetSearchParams(fip,
new CountFacetRequest(new CategoryPath("a"), 10),
new CountFacetRequest(new CategoryPath("b"), 10));
new CountFacetRequest(new FacetLabel("a"), 10),
new CountFacetRequest(new FacetLabel("b"), 10));
FacetsCollector fc = FacetsCollector.create(sParams, r, taxo);
newSearcher(r).search(new MatchAllDocsQuery(), fc);
@ -152,15 +152,15 @@ public class TestFacetsCollector extends FacetTestCase {
TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
FacetIndexingParams fip = new PerDimensionIndexingParams(Collections.singletonMap(new CategoryPath("b"), new CategoryListParams("$b")));
FacetIndexingParams fip = new PerDimensionIndexingParams(Collections.singletonMap(new FacetLabel("b"), new CategoryListParams("$b")));
FacetFields facetFields = new FacetFields(taxonomyWriter, fip);
for(int i = atLeast(30); i > 0; --i) {
Document doc = new Document();
doc.add(new StringField("f", "v", Store.NO));
List<CategoryPath> cats = new ArrayList<CategoryPath>();
cats.add(new CategoryPath("a"));
cats.add(new CategoryPath("b"));
List<FacetLabel> cats = new ArrayList<FacetLabel>();
cats.add(new FacetLabel("a"));
cats.add(new FacetLabel("b"));
facetFields.addFields(doc, cats);
iw.addDocument(doc);
}
@ -172,8 +172,8 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
FacetSearchParams sParams = new FacetSearchParams(fip,
new CountFacetRequest(new CategoryPath("a"), 10),
new SumScoreFacetRequest(new CategoryPath("b"), 10));
new CountFacetRequest(new FacetLabel("a"), 10),
new SumScoreFacetRequest(new FacetLabel("b"), 10));
FacetsCollector fc = FacetsCollector.create(sParams, r, taxo);
TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
@ -202,7 +202,7 @@ public class TestFacetsCollector extends FacetTestCase {
FacetFields facetFields = new FacetFields(taxonomyWriter);
for(int i = atLeast(30); i > 0; --i) {
Document doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("a"), new CategoryPath("b")));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("a"), new FacetLabel("b")));
iw.addDocument(doc);
}
@ -212,7 +212,7 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(indexDir);
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(CategoryPath.EMPTY, 10));
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(FacetLabel.EMPTY, 10));
final TaxonomyFacetsAccumulator fa = random().nextBoolean() ? new TaxonomyFacetsAccumulator(fsp, r, taxo) : new OldFacetsAccumulator(fsp, r, taxo);
FacetsCollector fc = FacetsCollector.create(fa);
@ -237,7 +237,7 @@ public class TestFacetsCollector extends FacetTestCase {
FacetFields facetFields = new FacetFields(taxonomyWriter);
Document doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("a/1", '/'), new CategoryPath("b/1", '/')));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("a/1", '/'), new FacetLabel("b/1", '/')));
iw.addDocument(doc);
taxonomyWriter.close();
iw.close();
@ -246,8 +246,8 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("a"), 10),
new CountFacetRequest(new CategoryPath("b"), 10));
new CountFacetRequest(new FacetLabel("a"), 10),
new CountFacetRequest(new FacetLabel("b"), 10));
final TaxonomyFacetsAccumulator fa = random().nextBoolean() ? new TaxonomyFacetsAccumulator(fsp, r, taxo) : new OldFacetsAccumulator(fsp, r, taxo);
final FacetsCollector fc = FacetsCollector.create(fa);
newSearcher(r).search(new MatchAllDocsQuery(), fc);
@ -269,7 +269,7 @@ public class TestFacetsCollector extends FacetTestCase {
FacetFields facetFields = new FacetFields(taxonomyWriter);
Document doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("a/1", '/'), new CategoryPath("b/1", '/')));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("a/1", '/'), new FacetLabel("b/1", '/')));
iw.addDocument(doc);
taxonomyWriter.close();
iw.close();
@ -278,8 +278,8 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
FacetSearchParams fsp = new FacetSearchParams(
new CountFacetRequest(new CategoryPath("a"), 10),
new CountFacetRequest(new CategoryPath("b"), 10));
new CountFacetRequest(new FacetLabel("a"), 10),
new CountFacetRequest(new FacetLabel("b"), 10));
final TaxonomyFacetsAccumulator fa = random().nextBoolean() ? new TaxonomyFacetsAccumulator(fsp, r, taxo) : new OldFacetsAccumulator(fsp, r, taxo);
final FacetsCollector fc = FacetsCollector.create(fa);
// this should populate the cached results, but doing search should clear the cache
@ -311,7 +311,7 @@ public class TestFacetsCollector extends FacetTestCase {
FacetFields facetFields = new FacetFields(taxonomyWriter);
Document doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("a/1", '/')));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("a/1", '/')));
iw.addDocument(doc);
taxonomyWriter.close();
iw.close();
@ -320,14 +320,14 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
// assert IntFacetResultHandler
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("a"), 10));
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new FacetLabel("a"), 10));
TaxonomyFacetsAccumulator fa = random().nextBoolean() ? new TaxonomyFacetsAccumulator(fsp, r, taxo) : new OldFacetsAccumulator(fsp, r, taxo);
FacetsCollector fc = FacetsCollector.create(fa);
newSearcher(r).search(new MatchAllDocsQuery(), fc);
assertTrue("invalid ordinal for child node: 0", 0 != fc.getFacetResults().get(0).getFacetResultNode().subResults.get(0).ordinal);
// assert IntFacetResultHandler
fsp = new FacetSearchParams(new SumScoreFacetRequest(new CategoryPath("a"), 10));
fsp = new FacetSearchParams(new SumScoreFacetRequest(new FacetLabel("a"), 10));
if (random().nextBoolean()) {
fa = new TaxonomyFacetsAccumulator(fsp, r, taxo);
} else {
@ -352,7 +352,7 @@ public class TestFacetsCollector extends FacetTestCase {
FacetFields facetFields = new FacetFields(taxonomyWriter);
for (int i = 0; i < 10; i++) {
Document doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("a", Integer.toString(i))));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("a", Integer.toString(i))));
iw.addDocument(doc);
}
@ -362,7 +362,7 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(indexDir);
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
CountFacetRequest cfr = new CountFacetRequest(new CategoryPath("a"), 2);
CountFacetRequest cfr = new CountFacetRequest(new FacetLabel("a"), 2);
cfr.setResultMode(random().nextBoolean() ? ResultMode.GLOBAL_FLAT : ResultMode.PER_NODE_IN_TREE);
FacetSearchParams fsp = new FacetSearchParams(cfr);
final TaxonomyFacetsAccumulator fa = random().nextBoolean() ? new TaxonomyFacetsAccumulator(fsp, r, taxo) : new OldFacetsAccumulator(fsp, r, taxo);
@ -384,7 +384,7 @@ public class TestFacetsCollector extends FacetTestCase {
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
FacetFields facetFields = new FacetFields(taxoWriter);
Document doc = new Document();
facetFields.addFields(doc, Arrays.asList(new CategoryPath("A/1", '/')));
facetFields.addFields(doc, Arrays.asList(new FacetLabel("A/1", '/')));
indexWriter.addDocument(doc);
IOUtils.close(indexWriter, taxoWriter);
@ -392,7 +392,7 @@ public class TestFacetsCollector extends FacetTestCase {
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
// ask to count a non-existing category to test labeling
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("B"), 5));
FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new FacetLabel("B"), 5));
final SamplingParams sampleParams = new SamplingParams();
sampleParams.setMaxSampleSize(100);
@ -434,7 +434,7 @@ public class TestFacetsCollector extends FacetTestCase {
searcher.search(new MatchAllDocsQuery(), fc);
List<FacetResult> facetResults = fc.getFacetResults();
assertNotNull(facetResults);
assertEquals("incorrect label returned for RangeAccumulator", new CategoryPath("f"), facetResults.get(0).getFacetResultNode().label);
assertEquals("incorrect label returned for RangeAccumulator", new FacetLabel("f"), facetResults.get(0).getFacetResultNode().label);
IOUtils.close(indexReader, taxoReader);

View File

@ -20,7 +20,7 @@ import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.params.PerDimensionIndexingParams;
import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -58,16 +58,16 @@ import org.junit.Test;
public class TestMultipleCategoryLists extends FacetTestCase {
private static final CategoryPath[] CATEGORIES = new CategoryPath[] {
new CategoryPath("Author", "Mark Twain"),
new CategoryPath("Author", "Stephen King"),
new CategoryPath("Author", "Kurt Vonnegut"),
new CategoryPath("Band", "Rock & Pop", "The Beatles"),
new CategoryPath("Band", "Punk", "The Ramones"),
new CategoryPath("Band", "Rock & Pop", "U2"),
new CategoryPath("Band", "Rock & Pop", "REM"),
new CategoryPath("Band", "Rock & Pop", "Dave Matthews Band"),
new CategoryPath("Composer", "Bach"),
private static final FacetLabel[] CATEGORIES = new FacetLabel[] {
new FacetLabel("Author", "Mark Twain"),
new FacetLabel("Author", "Stephen King"),
new FacetLabel("Author", "Kurt Vonnegut"),
new FacetLabel("Band", "Rock & Pop", "The Beatles"),
new FacetLabel("Band", "Punk", "The Ramones"),
new FacetLabel("Band", "Rock & Pop", "U2"),
new FacetLabel("Band", "Rock & Pop", "REM"),
new FacetLabel("Band", "Rock & Pop", "Dave Matthews Band"),
new FacetLabel("Composer", "Bach"),
};
@Test
@ -81,7 +81,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
// create and open a taxonomy writer
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(Collections.<CategoryPath, CategoryListParams>emptyMap());
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(Collections.<FacetLabel, CategoryListParams>emptyMap());
seedIndex(iw, tw, iParams);
@ -117,7 +117,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(
Collections.singletonMap(new CategoryPath("Author"), new CategoryListParams("$author")));
Collections.singletonMap(new FacetLabel("Author"), new CategoryListParams("$author")));
seedIndex(iw, tw, iParams);
IndexReader ir = iw.getReader();
@ -152,9 +152,9 @@ public class TestMultipleCategoryLists extends FacetTestCase {
// create and open a taxonomy writer
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
paramsMap.put(new CategoryPath("Band"), new CategoryListParams("$music"));
paramsMap.put(new CategoryPath("Composer"), new CategoryListParams("$music"));
Map<FacetLabel,CategoryListParams> paramsMap = new HashMap<FacetLabel,CategoryListParams>();
paramsMap.put(new FacetLabel("Band"), new CategoryListParams("$music"));
paramsMap.put(new FacetLabel("Composer"), new CategoryListParams("$music"));
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(paramsMap);
seedIndex(iw, tw, iParams);
@ -201,9 +201,9 @@ public class TestMultipleCategoryLists extends FacetTestCase {
// create and open a taxonomy writer
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
paramsMap.put(new CategoryPath("Band"), new CategoryListParams("$bands"));
paramsMap.put(new CategoryPath("Composer"), new CategoryListParams("$composers"));
Map<FacetLabel,CategoryListParams> paramsMap = new HashMap<FacetLabel,CategoryListParams>();
paramsMap.put(new FacetLabel("Band"), new CategoryListParams("$bands"));
paramsMap.put(new FacetLabel("Composer"), new CategoryListParams("$composers"));
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(paramsMap);
seedIndex(iw, tw, iParams);
@ -239,10 +239,10 @@ public class TestMultipleCategoryLists extends FacetTestCase {
// create and open a taxonomy writer
TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE);
Map<CategoryPath,CategoryListParams> paramsMap = new HashMap<CategoryPath,CategoryListParams>();
paramsMap.put(new CategoryPath("Band"), new CategoryListParams("$music"));
paramsMap.put(new CategoryPath("Composer"), new CategoryListParams("$music"));
paramsMap.put(new CategoryPath("Author"), new CategoryListParams("$literature"));
Map<FacetLabel,CategoryListParams> paramsMap = new HashMap<FacetLabel,CategoryListParams>();
paramsMap.put(new FacetLabel("Band"), new CategoryListParams("$music"));
paramsMap.put(new FacetLabel("Composer"), new CategoryListParams("$music"));
paramsMap.put(new FacetLabel("Author"), new CategoryListParams("$literature"));
PerDimensionIndexingParams iParams = new PerDimensionIndexingParams(paramsMap);
seedIndex(iw, tw, iParams);
@ -318,14 +318,14 @@ public class TestMultipleCategoryLists extends FacetTestCase {
TopScoreDocCollector topDocsCollector = TopScoreDocCollector.create(10, true);
List<FacetRequest> facetRequests = new ArrayList<FacetRequest>();
facetRequests.add(new CountFacetRequest(new CategoryPath("Band"), 10));
CountFacetRequest bandDepth = new CountFacetRequest(new CategoryPath("Band"), 10);
facetRequests.add(new CountFacetRequest(new FacetLabel("Band"), 10));
CountFacetRequest bandDepth = new CountFacetRequest(new FacetLabel("Band"), 10);
bandDepth.setDepth(2);
// makes it easier to check the results in the test.
bandDepth.setResultMode(ResultMode.GLOBAL_FLAT);
facetRequests.add(bandDepth);
facetRequests.add(new CountFacetRequest(new CategoryPath("Author"), 10));
facetRequests.add(new CountFacetRequest(new CategoryPath("Band", "Rock & Pop"), 10));
facetRequests.add(new CountFacetRequest(new FacetLabel("Author"), 10));
facetRequests.add(new CountFacetRequest(new FacetLabel("Band", "Rock & Pop"), 10));
// Faceted search parameters indicate which facets are we interested in
FacetSearchParams facetSearchParams = new FacetSearchParams(iParams, facetRequests);
@ -338,7 +338,7 @@ public class TestMultipleCategoryLists extends FacetTestCase {
private void seedIndex(RandomIndexWriter iw, TaxonomyWriter tw, FacetIndexingParams iParams) throws IOException {
FacetFields facetFields = new FacetFields(tw, iParams);
for (CategoryPath cp : CATEGORIES) {
for (FacetLabel cp : CATEGORIES) {
Document doc = new Document();
facetFields.addFields(doc, Collections.singletonList(cp));
doc.add(new TextField("content", "alpha", Field.Store.YES));

View File

@ -6,7 +6,7 @@ import org.apache.lucene.facet.FacetTestBase;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.junit.After;
import org.junit.Before;
@ -43,7 +43,7 @@ public class TestSameRequestAccumulation extends FacetTestBase {
// Following LUCENE-4461 - ensure requesting the (exact) same request more
// than once does not alter the results
public void testTwoSameRequests() throws Exception {
final CountFacetRequest facetRequest = new CountFacetRequest(new CategoryPath("root"), 10);
final CountFacetRequest facetRequest = new CountFacetRequest(new FacetLabel("root"), 10);
FacetSearchParams fsp = new FacetSearchParams(fip, facetRequest);
FacetsCollector fc = FacetsCollector.create(fsp, indexReader, taxoReader);
@ -52,7 +52,7 @@ public class TestSameRequestAccumulation extends FacetTestBase {
final String expected = fc.getFacetResults().get(0).toString();
// now add the same facet request with duplicates (same instance and same one)
fsp = new FacetSearchParams(fip, facetRequest, facetRequest, new CountFacetRequest(new CategoryPath("root"), 10));
fsp = new FacetSearchParams(fip, facetRequest, facetRequest, new CountFacetRequest(new FacetLabel("root"), 10));
// make sure the search params holds 3 requests now
assertEquals(3, fsp.facetRequests.size());

View File

@ -31,7 +31,7 @@ import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.SearcherTaxonomyManager.SearcherAndTaxonomy;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.MatchAllDocsQuery;
@ -59,7 +59,7 @@ public class TestSearcherTaxonomyManager extends FacetTestCase {
List<String> paths = new ArrayList<String>();
while (true) {
Document doc = new Document();
List<CategoryPath> docPaths = new ArrayList<CategoryPath>();
List<FacetLabel> docPaths = new ArrayList<FacetLabel>();
int numPaths = _TestUtil.nextInt(random(), 1, 5);
for(int i=0;i<numPaths;i++) {
String path;
@ -78,7 +78,7 @@ public class TestSearcherTaxonomyManager extends FacetTestCase {
}
}
}
docPaths.add(new CategoryPath("field", path));
docPaths.add(new FacetLabel("field", path));
}
try {
facetFields.addFields(doc, docPaths);
@ -132,7 +132,7 @@ public class TestSearcherTaxonomyManager extends FacetTestCase {
try {
//System.out.println("search maxOrd=" + pair.taxonomyReader.getSize());
int topN = _TestUtil.nextInt(random(), 1, 20);
CountFacetRequest cfr = new CountFacetRequest(new CategoryPath("field"), topN);
CountFacetRequest cfr = new CountFacetRequest(new FacetLabel("field"), topN);
FacetSearchParams fsp = new FacetSearchParams(cfr);
FacetsCollector fc = FacetsCollector.create(fsp, pair.searcher.getIndexReader(), pair.taxonomyReader);
pair.searcher.search(new MatchAllDocsQuery(), fc);

View File

@ -13,7 +13,7 @@ import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -57,7 +57,7 @@ public class TestStandardFacetsAccumulator extends FacetTestCase {
doc.add(new StringField("f", "a", Store.NO));
}
if (facetFields != null) {
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("A", Integer.toString(i))));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("A", Integer.toString(i))));
}
indexWriter.addDocument(doc);
}
@ -97,7 +97,7 @@ public class TestStandardFacetsAccumulator extends FacetTestCase {
// search for "f:a", only segments 1 and 3 should match results
Query q = new TermQuery(new Term("f", "a"));
FacetRequest countNoComplements = new CountFacetRequest(new CategoryPath("A"), 10);
FacetRequest countNoComplements = new CountFacetRequest(new FacetLabel("A"), 10);
FacetSearchParams fsp = new FacetSearchParams(fip, countNoComplements);
FacetsCollector fc = FacetsCollector.create(fsp , indexReader, taxoReader);
indexSearcher.search(q, fc);

View File

@ -14,7 +14,7 @@ import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -66,7 +66,7 @@ public class TestSumValueSourceFacetRequest extends FacetTestCase {
for (int i = 0; i < 4; i++) {
Document doc = new Document();
doc.add(new NumericDocValuesField("price", (i+1)));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a", Integer.toString(i % 2))));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("a", Integer.toString(i % 2))));
iw.addDocument(doc);
}
@ -77,7 +77,7 @@ public class TestSumValueSourceFacetRequest extends FacetTestCase {
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
ValueSource valueSource = new LongFieldSource("price");
FacetSearchParams fsp = new FacetSearchParams(new SumValueSourceFacetRequest(new CategoryPath("a"), 10, valueSource, false));
FacetSearchParams fsp = new FacetSearchParams(new SumValueSourceFacetRequest(new FacetLabel("a"), 10, valueSource, false));
FacetsCollector fc = FacetsCollector.create(fsp, r, taxo);
newSearcher(r).search(new MatchAllDocsQuery(), fc);
@ -99,7 +99,7 @@ public class TestSumValueSourceFacetRequest extends FacetTestCase {
for (int i = 0; i < 4; i++) {
Document doc = new Document();
doc.add(new NumericDocValuesField("price", (i+1)));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a", Integer.toString(i % 2))));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("a", Integer.toString(i % 2))));
iw.addDocument(doc);
}
@ -131,7 +131,7 @@ public class TestSumValueSourceFacetRequest extends FacetTestCase {
@Override public String description() { return "score()"; }
};
FacetSearchParams fsp = new FacetSearchParams(new SumValueSourceFacetRequest(new CategoryPath("a"), 10, valueSource, true));
FacetSearchParams fsp = new FacetSearchParams(new SumValueSourceFacetRequest(new FacetLabel("a"), 10, valueSource, true));
FacetsCollector fc = FacetsCollector.create(fsp, r, taxo);
TopScoreDocCollector tsdc = TopScoreDocCollector.create(10, true);
// score documents by their 'price' field - makes asserting the correct counts for the categories easier
@ -161,7 +161,7 @@ public class TestSumValueSourceFacetRequest extends FacetTestCase {
for (int i = 0; i < 4; i++) {
Document doc = new Document();
doc.add(new NumericDocValuesField("price", (i+1)));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a", Integer.toString(i % 2), "1")));
facetFields.addFields(doc, Collections.singletonList(new FacetLabel("a", Integer.toString(i % 2), "1")));
iw.addDocument(doc);
}
@ -172,7 +172,7 @@ public class TestSumValueSourceFacetRequest extends FacetTestCase {
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
ValueSource valueSource = new LongFieldSource("price");
FacetSearchParams fsp = new FacetSearchParams(fip, new SumValueSourceFacetRequest(new CategoryPath("a"), 10, valueSource, false));
FacetSearchParams fsp = new FacetSearchParams(fip, new SumValueSourceFacetRequest(new FacetLabel("a"), 10, valueSource, false));
FacetsCollector fc = FacetsCollector.create(fsp, r, taxo);
newSearcher(r).search(new MatchAllDocsQuery(), fc);

View File

@ -15,7 +15,7 @@ import org.apache.lucene.facet.old.OldFacetsAccumulator;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
@ -107,35 +107,35 @@ public class TestTopKInEachNodeResultHandler extends FacetTestCase {
// facet counts and compare to control
Query q = new TermQuery(new Term("content", "alpha"));
CountFacetRequest cfra23 = new CountFacetRequest(new CategoryPath("a"), 2);
CountFacetRequest cfra23 = new CountFacetRequest(new FacetLabel("a"), 2);
cfra23.setDepth(3);
cfra23.setResultMode(ResultMode.PER_NODE_IN_TREE);
CountFacetRequest cfra22 = new CountFacetRequest(new CategoryPath("a"), 2);
CountFacetRequest cfra22 = new CountFacetRequest(new FacetLabel("a"), 2);
cfra22.setDepth(2);
cfra22.setResultMode(ResultMode.PER_NODE_IN_TREE);
CountFacetRequest cfra21 = new CountFacetRequest(new CategoryPath("a"), 2);
CountFacetRequest cfra21 = new CountFacetRequest(new FacetLabel("a"), 2);
cfra21.setDepth(1);
cfra21.setResultMode(ResultMode.PER_NODE_IN_TREE);
CountFacetRequest cfrb22 = new CountFacetRequest(new CategoryPath("a", "b"), 2);
CountFacetRequest cfrb22 = new CountFacetRequest(new FacetLabel("a", "b"), 2);
cfrb22.setDepth(2);
cfrb22.setResultMode(ResultMode.PER_NODE_IN_TREE);
CountFacetRequest cfrb23 = new CountFacetRequest(new CategoryPath("a", "b"), 2);
CountFacetRequest cfrb23 = new CountFacetRequest(new FacetLabel("a", "b"), 2);
cfrb23.setDepth(3);
cfrb23.setResultMode(ResultMode.PER_NODE_IN_TREE);
CountFacetRequest cfrb21 = new CountFacetRequest(new CategoryPath("a", "b"), 2);
CountFacetRequest cfrb21 = new CountFacetRequest(new FacetLabel("a", "b"), 2);
cfrb21.setDepth(1);
cfrb21.setResultMode(ResultMode.PER_NODE_IN_TREE);
CountFacetRequest doctor = new CountFacetRequest(new CategoryPath("Doctor"), 2);
CountFacetRequest doctor = new CountFacetRequest(new FacetLabel("Doctor"), 2);
doctor.setDepth(1);
doctor.setResultMode(ResultMode.PER_NODE_IN_TREE);
CountFacetRequest cfrb20 = new CountFacetRequest(new CategoryPath("a", "b"), 2);
CountFacetRequest cfrb20 = new CountFacetRequest(new FacetLabel("a", "b"), 2);
cfrb20.setDepth(0);
cfrb20.setResultMode(ResultMode.PER_NODE_IN_TREE);
@ -296,7 +296,7 @@ public class TestTopKInEachNodeResultHandler extends FacetTestCase {
TaxonomyWriter tw, String... strings) throws IOException {
Document d = new Document();
FacetFields facetFields = new FacetFields(tw, iParams);
facetFields.addFields(d, Collections.singletonList(new CategoryPath(strings)));
facetFields.addFields(d, Collections.singletonList(new FacetLabel(strings)));
d.add(new TextField("content", "alpha", Field.Store.YES));
iw.addDocument(d);
}

View File

@ -8,7 +8,7 @@ import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.junit.Test;
@ -31,20 +31,20 @@ import org.junit.Test;
public class TestTopKResultsHandler extends BaseTestTopK {
private static final CategoryPath[] CATEGORIES = {
new CategoryPath( "a", "b"),
new CategoryPath( "a", "b", "1"),
new CategoryPath( "a", "b", "1"),
new CategoryPath( "a", "b", "2"),
new CategoryPath( "a", "b", "2"),
new CategoryPath( "a", "b", "3"),
new CategoryPath( "a", "b", "4"),
new CategoryPath( "a", "c"),
new CategoryPath( "a", "c"),
new CategoryPath( "a", "c"),
new CategoryPath( "a", "c"),
new CategoryPath( "a", "c"),
new CategoryPath( "a", "c", "1"),
private static final FacetLabel[] CATEGORIES = {
new FacetLabel( "a", "b"),
new FacetLabel( "a", "b", "1"),
new FacetLabel( "a", "b", "1"),
new FacetLabel( "a", "b", "2"),
new FacetLabel( "a", "b", "2"),
new FacetLabel( "a", "b", "3"),
new FacetLabel( "a", "b", "4"),
new FacetLabel( "a", "c"),
new FacetLabel( "a", "c"),
new FacetLabel( "a", "c"),
new FacetLabel( "a", "c"),
new FacetLabel( "a", "c"),
new FacetLabel( "a", "c", "1"),
};
@Override
@ -58,7 +58,7 @@ public class TestTopKResultsHandler extends BaseTestTopK {
}
@Override
protected List<CategoryPath> getCategories(int doc) {
protected List<FacetLabel> getCategories(int doc) {
return Arrays.asList(CATEGORIES[doc]);
}
@ -74,15 +74,15 @@ public class TestTopKResultsHandler extends BaseTestTopK {
initIndex(fip);
List<FacetRequest> facetRequests = new ArrayList<FacetRequest>();
facetRequests.add(new CountFacetRequest(new CategoryPath("a"), 100));
CountFacetRequest cfra = new CountFacetRequest(new CategoryPath("a"), 100);
facetRequests.add(new CountFacetRequest(new FacetLabel("a"), 100));
CountFacetRequest cfra = new CountFacetRequest(new FacetLabel("a"), 100);
cfra.setDepth(3);
// makes it easier to check the results in the test.
cfra.setResultMode(ResultMode.GLOBAL_FLAT);
facetRequests.add(cfra);
facetRequests.add(new CountFacetRequest(new CategoryPath("a", "b"), 100));
facetRequests.add(new CountFacetRequest(new CategoryPath("a", "b", "1"), 100));
facetRequests.add(new CountFacetRequest(new CategoryPath("a", "c"), 100));
facetRequests.add(new CountFacetRequest(new FacetLabel("a", "b"), 100));
facetRequests.add(new CountFacetRequest(new FacetLabel("a", "b", "1"), 100));
facetRequests.add(new CountFacetRequest(new FacetLabel("a", "c"), 100));
// do different facet counts and compare to control
FacetSearchParams sParams = getFacetSearchParams(facetRequests, fip);
@ -153,7 +153,7 @@ public class TestTopKResultsHandler extends BaseTestTopK {
initIndex(fip);
// do different facet counts and compare to control
CategoryPath path = new CategoryPath("a", "b");
FacetLabel path = new FacetLabel("a", "b");
FacetSearchParams sParams = getFacetSearchParams(fip, new CountFacetRequest(path, Integer.MAX_VALUE));
FacetsCollector fc = FacetsCollector.create(sParams, indexReader, taxoReader);
@ -193,7 +193,7 @@ public class TestTopKResultsHandler extends BaseTestTopK {
FacetIndexingParams fip = getFacetIndexingParams(partitionSize);
initIndex(fip);
CategoryPath path = new CategoryPath("Miau Hattulla");
FacetLabel path = new FacetLabel("Miau Hattulla");
FacetSearchParams sParams = getFacetSearchParams(fip, new CountFacetRequest(path, 10));
FacetsCollector fc = FacetsCollector.create(sParams, indexReader, taxoReader);

View File

@ -47,7 +47,7 @@ import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.facet.search.FacetsAccumulator;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.sortedset.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;

View File

@ -27,7 +27,7 @@ import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.simple.SortedSetDocValuesFacetCounts;
import org.apache.lucene.facet.simple.SortedSetDocValuesReaderState;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
@ -80,8 +80,8 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// DrillDown:
SimpleDrillDownQuery q = new SimpleDrillDownQuery();
q.add(new CategoryPath("a", "foo"));
q.add(new CategoryPath("b", "baz"));
q.add(new FacetLabel("a", "foo"));
q.add(new FacetLabel("b", "baz"));
TopDocs hits = searcher.search(q, 1);
assertEquals(1, hits.totalHits);

View File

@ -30,7 +30,7 @@ import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
@ -117,7 +117,7 @@ public class TestTaxonomyFacets extends FacetTestCase {
// Now user drills down on Publish Date/2010:
SimpleDrillDownQuery q2 = new SimpleDrillDownQuery(new MatchAllDocsQuery());
q2.add(new CategoryPath("Publish Date", "2010"));
q2.add(new FacetLabel("Publish Date", "2010"));
c = new SimpleFacetsCollector();
searcher.search(q2, c);
facets = new TaxonomyFacetCounts(taxoReader, fts, c);

View File

@ -31,7 +31,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;

View File

@ -32,7 +32,7 @@ import org.apache.lucene.facet.search.DrillDownQuery;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.FacetLabel;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
@ -66,14 +66,14 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// Mixup order we add these paths, to verify tie-break
// order is by label (unicode sort) and has nothing to
// do w/ order we added them:
List<CategoryPath> paths = new ArrayList<CategoryPath>();
paths.add(new CategoryPath("a", "foo"));
paths.add(new CategoryPath("a", "bar"));
paths.add(new CategoryPath("a", "zoo"));
List<FacetLabel> paths = new ArrayList<FacetLabel>();
paths.add(new FacetLabel("a", "foo"));
paths.add(new FacetLabel("a", "bar"));
paths.add(new FacetLabel("a", "zoo"));
Collections.shuffle(paths, random());
paths.add(new CategoryPath("b", "baz"));
paths.add(new CategoryPath("b" + FacetIndexingParams.DEFAULT_FACET_DELIM_CHAR, "bazfoo"));
paths.add(new FacetLabel("b", "baz"));
paths.add(new FacetLabel("b" + FacetIndexingParams.DEFAULT_FACET_DELIM_CHAR, "bazfoo"));
dvFields.addFields(doc, paths);
@ -83,7 +83,7 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
}
doc = new Document();
dvFields.addFields(doc, Collections.singletonList(new CategoryPath("a", "foo")));
dvFields.addFields(doc, Collections.singletonList(new FacetLabel("a", "foo")));
writer.addDocument(doc);
// NRT open
@ -91,9 +91,9 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
writer.close();
List<FacetRequest> requests = new ArrayList<FacetRequest>();
requests.add(new CountFacetRequest(new CategoryPath("a"), 10));
requests.add(new CountFacetRequest(new CategoryPath("b"), 10));
requests.add(new CountFacetRequest(new CategoryPath("b" + FacetIndexingParams.DEFAULT_FACET_DELIM_CHAR), 10));
requests.add(new CountFacetRequest(new FacetLabel("a"), 10));
requests.add(new CountFacetRequest(new FacetLabel("b"), 10));
requests.add(new CountFacetRequest(new FacetLabel("b" + FacetIndexingParams.DEFAULT_FACET_DELIM_CHAR), 10));
final boolean doDimCount = random().nextBoolean();
@ -133,13 +133,13 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
// DrillDown:
DrillDownQuery q = new DrillDownQuery(fip);
q.add(new CategoryPath("a", "foo"));
q.add(new CategoryPath("b", "baz"));
q.add(new FacetLabel("a", "foo"));
q.add(new FacetLabel("b", "baz"));
TopDocs hits = searcher.search(q, 1);
assertEquals(1, hits.totalHits);
q = new DrillDownQuery(fip);
q.add(new CategoryPath("a"));
q.add(new FacetLabel("a"));
hits = searcher.search(q, 1);
assertEquals(2, hits.totalHits);
@ -156,24 +156,24 @@ public class TestSortedSetDocValuesFacets extends FacetTestCase {
SortedSetDocValuesFacetFields dvFields = new SortedSetDocValuesFacetFields();
Document doc = new Document();
dvFields.addFields(doc, Collections.singletonList(new CategoryPath("a", "foo")));
dvFields.addFields(doc, Collections.singletonList(new FacetLabel("a", "foo")));
writer.addDocument(doc);
IndexReader r = writer.getReader();
SortedSetDocValuesReaderState state = new SortedSetDocValuesReaderState(r);
doc = new Document();
dvFields.addFields(doc, Collections.singletonList(new CategoryPath("a", "bar")));
dvFields.addFields(doc, Collections.singletonList(new FacetLabel("a", "bar")));
writer.addDocument(doc);
doc = new Document();
dvFields.addFields(doc, Collections.singletonList(new CategoryPath("a", "baz")));
dvFields.addFields(doc, Collections.singletonList(new FacetLabel("a", "baz")));
writer.addDocument(doc);
IndexSearcher searcher = newSearcher(writer.getReader());
List<FacetRequest> requests = new ArrayList<FacetRequest>();
requests.add(new CountFacetRequest(new CategoryPath("a"), 10));
requests.add(new CountFacetRequest(new FacetLabel("a"), 10));
FacetSearchParams fsp = new FacetSearchParams(requests);

View File

@ -27,19 +27,19 @@ public class TestCategoryPath extends FacetTestCase {
@Test
public void testBasic() {
assertEquals(0, CategoryPath.EMPTY.length);
assertEquals(1, new CategoryPath("hello").length);
assertEquals(2, new CategoryPath("hello", "world").length);
assertEquals(0, FacetLabel.EMPTY.length);
assertEquals(1, new FacetLabel("hello").length);
assertEquals(2, new FacetLabel("hello", "world").length);
}
@Test
public void testToString() {
// When the category is empty, we expect an empty string
assertEquals("", CategoryPath.EMPTY.toString('/'));
assertEquals("", FacetLabel.EMPTY.toString('/'));
// one category (so no delimiter needed)
assertEquals("hello", new CategoryPath("hello").toString('/'));
assertEquals("hello", new FacetLabel("hello").toString('/'));
// more than one category (so no delimiter needed)
assertEquals("hello/world", new CategoryPath("hello", "world").toString('/'));
assertEquals("hello/world", new FacetLabel("hello", "world").toString('/'));
}
@Test
@ -48,7 +48,7 @@ public class TestCategoryPath extends FacetTestCase {
for (int i = 0; i < components.length; i++) {
components[i] = Integer.toString(i);
}
CategoryPath cp = new CategoryPath(components);
FacetLabel cp = new FacetLabel(components);
for (int i = 0; i < components.length; i++) {
assertEquals(i, Integer.parseInt(cp.components[i]));
}
@ -56,15 +56,15 @@ public class TestCategoryPath extends FacetTestCase {
@Test
public void testDelimiterConstructor() {
CategoryPath p = new CategoryPath("", '/');
FacetLabel p = new FacetLabel("", '/');
assertEquals(0, p.length);
p = new CategoryPath("hello", '/');
p = new FacetLabel("hello", '/');
assertEquals(p.length, 1);
assertEquals(p.toString('@'), "hello");
p = new CategoryPath("hi/there", '/');
p = new FacetLabel("hi/there", '/');
assertEquals(p.length, 2);
assertEquals(p.toString('@'), "hi@there");
p = new CategoryPath("how/are/you/doing?", '/');
p = new FacetLabel("how/are/you/doing?", '/');
assertEquals(p.length, 4);
assertEquals(p.toString('@'), "how@are@you@doing?");
}
@ -75,17 +75,17 @@ public class TestCategoryPath extends FacetTestCase {
// defaults to creating an object with a 0 initial capacity.
// If we change this default later, we also need to change this
// test.
CategoryPath p = CategoryPath.EMPTY;
FacetLabel p = FacetLabel.EMPTY;
assertEquals(0, p.length);
assertEquals("", p.toString('/'));
}
@Test
public void testSubPath() {
final CategoryPath p = new CategoryPath("hi", "there", "man");
final FacetLabel p = new FacetLabel("hi", "there", "man");
assertEquals(p.length, 3);
CategoryPath p1 = p.subpath(2);
FacetLabel p1 = p.subpath(2);
assertEquals(2, p1.length);
assertEquals("hi/there", p1.toString('/'));
@ -109,38 +109,38 @@ public class TestCategoryPath extends FacetTestCase {
@Test
public void testEquals() {
assertEquals(CategoryPath.EMPTY, CategoryPath.EMPTY);
assertFalse(CategoryPath.EMPTY.equals(new CategoryPath("hi")));
assertFalse(CategoryPath.EMPTY.equals(Integer.valueOf(3)));
assertEquals(new CategoryPath("hello", "world"), new CategoryPath("hello", "world"));
assertEquals(FacetLabel.EMPTY, FacetLabel.EMPTY);
assertFalse(FacetLabel.EMPTY.equals(new FacetLabel("hi")));
assertFalse(FacetLabel.EMPTY.equals(Integer.valueOf(3)));
assertEquals(new FacetLabel("hello", "world"), new FacetLabel("hello", "world"));
}
@Test
public void testHashCode() {
assertEquals(CategoryPath.EMPTY.hashCode(), CategoryPath.EMPTY.hashCode());
assertFalse(CategoryPath.EMPTY.hashCode() == new CategoryPath("hi").hashCode());
assertEquals(new CategoryPath("hello", "world").hashCode(), new CategoryPath("hello", "world").hashCode());
assertEquals(FacetLabel.EMPTY.hashCode(), FacetLabel.EMPTY.hashCode());
assertFalse(FacetLabel.EMPTY.hashCode() == new FacetLabel("hi").hashCode());
assertEquals(new FacetLabel("hello", "world").hashCode(), new FacetLabel("hello", "world").hashCode());
}
@Test
public void testLongHashCode() {
assertEquals(CategoryPath.EMPTY.longHashCode(), CategoryPath.EMPTY.longHashCode());
assertFalse(CategoryPath.EMPTY.longHashCode() == new CategoryPath("hi").longHashCode());
assertEquals(new CategoryPath("hello", "world").longHashCode(), new CategoryPath("hello", "world").longHashCode());
assertEquals(FacetLabel.EMPTY.longHashCode(), FacetLabel.EMPTY.longHashCode());
assertFalse(FacetLabel.EMPTY.longHashCode() == new FacetLabel("hi").longHashCode());
assertEquals(new FacetLabel("hello", "world").longHashCode(), new FacetLabel("hello", "world").longHashCode());
}
@Test
public void testArrayConstructor() {
CategoryPath p = new CategoryPath("hello", "world", "yo");
FacetLabel p = new FacetLabel("hello", "world", "yo");
assertEquals(3, p.length);
assertEquals("hello/world/yo", p.toString('/'));
}
@Test
public void testCharsNeededForFullPath() {
assertEquals(0, CategoryPath.EMPTY.fullPathLength());
assertEquals(0, FacetLabel.EMPTY.fullPathLength());
String[] components = { "hello", "world", "yo" };
CategoryPath cp = new CategoryPath(components);
FacetLabel cp = new FacetLabel(components);
int expectedCharsNeeded = 0;
for (String comp : components) {
expectedCharsNeeded += comp.length();
@ -151,7 +151,7 @@ public class TestCategoryPath extends FacetTestCase {
@Test
public void testCopyToCharArray() {
CategoryPath p = new CategoryPath("hello", "world", "yo");
FacetLabel p = new FacetLabel("hello", "world", "yo");
char[] charArray = new char[p.fullPathLength()];
int numCharsCopied = p.copyFullPath(charArray, 0, '.');
assertEquals(p.fullPathLength(), numCharsCopied);
@ -160,20 +160,20 @@ public class TestCategoryPath extends FacetTestCase {
@Test
public void testCompareTo() {
CategoryPath p = new CategoryPath("a/b/c/d", '/');
CategoryPath pother = new CategoryPath("a/b/c/d", '/');
FacetLabel p = new FacetLabel("a/b/c/d", '/');
FacetLabel pother = new FacetLabel("a/b/c/d", '/');
assertEquals(0, pother.compareTo(p));
assertEquals(0, p.compareTo(pother));
pother = new CategoryPath("", '/');
pother = new FacetLabel("", '/');
assertTrue(pother.compareTo(p) < 0);
assertTrue(p.compareTo(pother) > 0);
pother = new CategoryPath("a/b_/c/d", '/');
pother = new FacetLabel("a/b_/c/d", '/');
assertTrue(pother.compareTo(p) > 0);
assertTrue(p.compareTo(pother) < 0);
pother = new CategoryPath("a/b/c", '/');
pother = new FacetLabel("a/b/c", '/');
assertTrue(pother.compareTo(p) < 0);
assertTrue(p.compareTo(pother) > 0);
pother = new CategoryPath("a/b/c/e", '/');
pother = new FacetLabel("a/b/c/e", '/');
assertTrue(pother.compareTo(p) > 0);
assertTrue(p.compareTo(pother) < 0);
}
@ -192,7 +192,7 @@ public class TestCategoryPath extends FacetTestCase {
for (String[] components : components_tests) {
try {
assertNotNull(new CategoryPath(components));
assertNotNull(new FacetLabel(components));
fail("empty or null components should not be allowed: " + Arrays.toString(components));
} catch (IllegalArgumentException e) {
// ok
@ -206,7 +206,7 @@ public class TestCategoryPath extends FacetTestCase {
for (String path : path_tests) {
try {
assertNotNull(new CategoryPath(path, '/'));
assertNotNull(new FacetLabel(path, '/'));
fail("empty or null components should not be allowed: " + path);
} catch (IllegalArgumentException e) {
// ok
@ -214,7 +214,7 @@ public class TestCategoryPath extends FacetTestCase {
}
// a trailing path separator is produces only one component
assertNotNull(new CategoryPath("test/", '/'));
assertNotNull(new FacetLabel("test/", '/'));
}
@ -222,7 +222,7 @@ public class TestCategoryPath extends FacetTestCase {
public void testInvalidDelimChar() throws Exception {
// Make sure CategoryPath doesn't silently corrupt:
char[] buf = new char[100];
CategoryPath cp = new CategoryPath("foo/bar");
FacetLabel cp = new FacetLabel("foo/bar");
try {
cp.toString();
fail("expected exception");
@ -235,7 +235,7 @@ public class TestCategoryPath extends FacetTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
cp = new CategoryPath("abc", "foo/bar");
cp = new FacetLabel("abc", "foo/bar");
try {
cp.toString();
fail("expected exception");
@ -248,7 +248,7 @@ public class TestCategoryPath extends FacetTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
cp = new CategoryPath("foo:bar");
cp = new FacetLabel("foo:bar");
try {
cp.toString(':');
fail("expected exception");
@ -261,7 +261,7 @@ public class TestCategoryPath extends FacetTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
cp = new CategoryPath("abc", "foo:bar");
cp = new FacetLabel("abc", "foo:bar");
try {
cp.toString(':');
fail("expected exception");
@ -280,7 +280,7 @@ public class TestCategoryPath extends FacetTestCase {
public void testLongPath() throws Exception {
String bigComp = null;
while (true) {
int len = CategoryPath.MAX_CATEGORY_PATH_LENGTH;
int len = FacetLabel.MAX_CATEGORY_PATH_LENGTH;
bigComp = _TestUtil.randomSimpleString(random(), len, len);
if (bigComp.indexOf('\u001f') != -1) {
continue;
@ -289,14 +289,14 @@ public class TestCategoryPath extends FacetTestCase {
}
try {
assertNotNull(new CategoryPath("dim", bigComp));
assertNotNull(new FacetLabel("dim", bigComp));
fail("long paths should not be allowed; len=" + bigComp.length());
} catch (IllegalArgumentException e) {
// expected
}
try {
assertNotNull(new CategoryPath("dim\u001f" + bigComp, '\u001f'));
assertNotNull(new FacetLabel("dim\u001f" + bigComp, '\u001f'));
fail("long paths should not be allowed; len=" + bigComp.length());
} catch (IllegalArgumentException e) {
// expected

Some files were not shown because too many files have changed in this diff Show More