LUCENE-5298: add SumValueSourceFacetRequest

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1538431 13f79535-47bb-0310-9956-ffa450edef68
Shai Erera 2013-11-03 19:57:06 +00:00
parent dab5e2aa89
commit 442e3253fc
8 changed files with 592 additions and 4 deletions

View File

@@ -135,6 +135,10 @@ New Features
numeric DocValues fields of documents, without re-indexing them.
(Shai Erera, Mike McCandless, Robert Muir)
* LUCENE-5298: Add SumValueSourceFacetRequest for aggregating facets by
a ValueSource, such as a NumericDocValuesField or an expression.
(Shai Erera)
Bug Fixes
* LUCENE-4998: Fixed a few places to pass IOContext.READONCE instead

View File

@@ -33,22 +33,27 @@
<pathelement path="${analyzers-common.jar}"/>
<pathelement path="${queryparser.jar}"/>
<pathelement path="${lucene-core.jar}"/>
<pathelement path="${queries.jar}"/>
<pathelement path="${facet.jar}"/>
<pathelement path="${expressions.jar}"/>
<fileset dir="../expressions/lib"/>
<fileset dir="lib"/>
</path>
<target name="javadocs" depends="javadocs-analyzers-common,javadocs-queryparser,javadocs-facet,compile-core">
<target name="javadocs" depends="javadocs-analyzers-common,javadocs-queryparser,javadocs-facet,javadocs-expressions,compile-core">
<!-- we link the example source in the javadocs, as it's ref'ed elsewhere -->
<invoke-module-javadoc linksource="yes">
<links>
<link href="../analyzers-common"/>
<link href="../queryparser"/>
<link href="../queries"/>
<link href="../facet"/>
<link href="../expressions"/>
</links>
</invoke-module-javadoc>
</target>
<target name="compile-core" depends="jar-analyzers-common,jar-queryparser,jar-facet,common.compile-core" />
<target name="compile-core" depends="jar-analyzers-common,jar-queryparser,jar-queries,jar-facet,jar-expressions,common.compile-core" />
<target name="default" depends="jar-core,build-web-demo"/>

View File

@@ -0,0 +1,136 @@
package org.apache.lucene.demo.facet;
import java.io.IOException;
import java.text.ParseException;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.SimpleBindings;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetsCollector;
import org.apache.lucene.facet.search.SumValueSourceFacetRequest;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Shows facet aggregation by an expression. */
public class ExpressionAggregationFacetsExample {
private final Directory indexDir = new RAMDirectory();
private final Directory taxoDir = new RAMDirectory();
/** Empty constructor */
public ExpressionAggregationFacetsExample() {}
private void add(IndexWriter indexWriter, FacetFields facetFields, String text, String category, long popularity) throws IOException {
Document doc = new Document();
doc.add(new TextField("c", text, Store.NO));
doc.add(new NumericDocValuesField("popularity", popularity));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath(category, '/')));
indexWriter.addDocument(doc);
}
/** Build the example index. */
private void index() throws IOException {
IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(FacetExamples.EXAMPLES_VER,
new WhitespaceAnalyzer(FacetExamples.EXAMPLES_VER)));
// Writes facet ords to a separate directory from the main index
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
// Reused across documents, to add the necessary facet fields
FacetFields facetFields = new FacetFields(taxoWriter);
add(indexWriter, facetFields, "foo bar", "A/B", 5L);
add(indexWriter, facetFields, "foo foo bar", "A/C", 3L);
indexWriter.close();
taxoWriter.close();
}
/** User runs a query and aggregates facets. */
private List<FacetResult> search() throws IOException, ParseException {
DirectoryReader indexReader = DirectoryReader.open(indexDir);
IndexSearcher searcher = new IndexSearcher(indexReader);
TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
// Aggregate categories by an expression that combines the document's score
// and its popularity field
Expression expr = JavascriptCompiler.compile("_score * sqrt(popularity)");
SimpleBindings bindings = new SimpleBindings();
bindings.add(new SortField("_score", SortField.Type.SCORE)); // the score of the document
bindings.add(new SortField("popularity", SortField.Type.LONG)); // the value of the 'popularity' field
FacetSearchParams fsp = new FacetSearchParams(
new SumValueSourceFacetRequest(new CategoryPath("A"), 10, expr.getValueSource(bindings), true));
// Aggregates the facet values
FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query, and use MultiCollector to
// wrap collecting the "normal" hits and also facets:
searcher.search(new MatchAllDocsQuery(), fc);
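// An illustrative sketch (not part of this demo) of the MultiCollector variant mentioned above;
// it would need imports for TermQuery, Term, TopScoreDocCollector and MultiCollector:
//   TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, true);
//   searcher.search(new TermQuery(new Term("c", "foo")), MultiCollector.wrap(topDocs, fc));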
// Retrieve results
List<FacetResult> facetResults = fc.getFacetResults();
indexReader.close();
taxoReader.close();
return facetResults;
}
/** Runs the search example. */
public List<FacetResult> runSearch() throws IOException, ParseException {
index();
return search();
}
/** Runs the search example and prints the results. */
public static void main(String[] args) throws Exception {
System.out.println("Facet counting example:");
System.out.println("-----------------------");
List<FacetResult> results = new ExpressionAggregationFacetsExample().runSearch();
for (FacetResult res : results) {
System.out.println(res);
}
}
}

View File

@@ -93,7 +93,7 @@ public class SimpleFacetsExample {
new CountFacetRequest(new CategoryPath("Publish Date"), 10),
new CountFacetRequest(new CategoryPath("Author"), 10));
// Aggregatses the facet counts
// Aggregates the facet counts
FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
// MatchAllDocsQuery is for "browsing" (counts facets

View File

@@ -0,0 +1,49 @@
package org.apache.lucene.demo.facet;
import java.util.List;
import java.util.Locale;
import org.apache.lucene.facet.search.FacetResult;
import org.apache.lucene.facet.search.FacetResultNode;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class TestExpressionAggregationFacetsExample extends LuceneTestCase {
private static String toSimpleString(FacetResult fr) {
StringBuilder sb = new StringBuilder();
toSimpleString(fr.getFacetRequest().categoryPath.length, 0, sb, fr.getFacetResultNode(), "");
return sb.toString();
}
private static void toSimpleString(int startLength, int depth, StringBuilder sb, FacetResultNode node, String indent) {
sb.append(String.format(Locale.ROOT, "%s%s (%.3f)\n", indent, node.label.components[startLength + depth - 1], node.value));
for (FacetResultNode childNode : node.subResults) {
toSimpleString(startLength, depth + 1, sb, childNode, indent + " ");
}
}
@Test
public void testSimple() throws Exception {
List<FacetResult> facetResults = new ExpressionAggregationFacetsExample().runSearch();
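// MatchAllDocsQuery scores every document 1.0, so each category's value is sqrt(popularity):
// sqrt(5) ~= 2.236 for A/B and sqrt(3) ~= 1.732 for A/C.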
assertEquals("A (0.000)\n B (2.236)\n C (1.732)\n", toSimpleString(facetResults.get(0)));
}
}

View File

@@ -25,11 +25,26 @@
<import file="../module-build.xml"/>
<path id="classpath">
<path refid="base.classpath"/>
<pathelement path="${queries.jar}"/>
</path>
<target name="compile-core" depends="jar-queries,common.compile-core" />
<target name="javadocs" depends="javadocs-queries,compile-core">
<invoke-module-javadoc>
<links>
<link href="../queries"/>
</links>
</invoke-module-javadoc>
</target>
<target name="run-encoding-benchmark" depends="compile-test">
<java classname="org.apache.lucene.util.encoding.EncodingSpeed" fork="true" failonerror="true">
<classpath refid="test.classpath" />
<classpath path="${build.dir}/classes/test" />
</java>
</target>
</project>

View File

@@ -0,0 +1,194 @@
package org.apache.lucene.facet.search;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.IntsRef;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A {@link FacetRequest} which aggregates categories by the sum of the values
* returned by a {@link ValueSource} for the documents they are associated with.
* This allows aggregating the value of a category by e.g. summing the values of
* a {@link NumericDocValuesField} indexed for the documents, or of a more complex
* expression (over multiple fields) computed with the expressions module.
*
* @lucene.experimental
*/
public class SumValueSourceFacetRequest extends FacetRequest {
private static abstract class SumValueSourceFacetsAggregator implements FacetsAggregator {
protected final ValueSource valueSource;
protected final IntsRef ordinals = new IntsRef(32);
protected SumValueSourceFacetsAggregator(ValueSource valueSource) {
this.valueSource = valueSource;
}
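// Recursively folds each child subtree's sum into that ordinal's slot and returns the
// total accumulated over the whole sibling chain, so parents end up with their children's sums.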
private float doRollup(int ordinal, int[] children, int[] siblings, float[] values) {
float value = 0f;
while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
float childValue = values[ordinal];
childValue += doRollup(children[ordinal], children, siblings, values);
values[ordinal] = childValue;
value += childValue;
ordinal = siblings[ordinal];
}
return value;
}
@Override
public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
float[] values = facetArrays.getFloatArray();
values[ordinal] += doRollup(children[ordinal], children, siblings, values);
}
@Override
public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays) {
return new FloatValueResolver(arrays);
}
}
private static class ScoreValueSourceFacetsAggregator extends SumValueSourceFacetsAggregator {
private static final class FakeScorer extends Scorer {
float score;
int docID;
FakeScorer() { super(null); }
@Override public float score() throws IOException { return score; }
@Override public int freq() throws IOException { throw new UnsupportedOperationException(); }
@Override public int docID() { return docID; }
@Override public int nextDoc() throws IOException { throw new UnsupportedOperationException(); }
@Override public int advance(int target) throws IOException { throw new UnsupportedOperationException(); }
@Override public long cost() { return 0; }
}
ScoreValueSourceFacetsAggregator(ValueSource valueSource) {
super(valueSource);
}
@Override
public void aggregate(MatchingDocs matchingDocs, CategoryListParams clp, FacetArrays facetArrays) throws IOException {
final CategoryListIterator cli = clp.createCategoryListIterator(0);
if (!cli.setNextReader(matchingDocs.context)) {
return;
}
assert matchingDocs.scores != null;
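// Expose the matching documents' scores to the ValueSource through the "scorer" context entry;
// an expression bound to _score reads the score from there.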
final FakeScorer scorer = new FakeScorer();
Map<String, Scorer> context = new HashMap<String, Scorer>();
context.put("scorer", scorer);
final FunctionValues fvalues = valueSource.getValues(context, matchingDocs.context);
final int length = matchingDocs.bits.length();
final float[] aggValues = facetArrays.getFloatArray();
int doc = 0;
int scoresIdx = 0;
while (doc < length && (doc = matchingDocs.bits.nextSetBit(doc)) != -1) {
scorer.docID = doc;
scorer.score = matchingDocs.scores[scoresIdx++];
cli.getOrdinals(doc, ordinals);
final int upto = ordinals.offset + ordinals.length;
float val = (float) fvalues.doubleVal(doc);
for (int i = ordinals.offset; i < upto; i++) {
aggValues[ordinals.ints[i]] += val;
}
++doc;
}
}
@Override
public boolean requiresDocScores() {
return true;
}
}
private static class NoScoreValueSourceFacetsAggregator extends SumValueSourceFacetsAggregator {
NoScoreValueSourceFacetsAggregator(ValueSource valueSource) {
super(valueSource);
}
@Override
public void aggregate(MatchingDocs matchingDocs, CategoryListParams clp, FacetArrays facetArrays) throws IOException {
final CategoryListIterator cli = clp.createCategoryListIterator(0);
if (!cli.setNextReader(matchingDocs.context)) {
return;
}
final FunctionValues fvalues = valueSource.getValues(Collections.emptyMap(), matchingDocs.context);
final int length = matchingDocs.bits.length();
final float[] aggValues = facetArrays.getFloatArray();
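// For each matching document, add its ValueSource value to every category ordinal it is associated with.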
int doc = 0;
while (doc < length && (doc = matchingDocs.bits.nextSetBit(doc)) != -1) {
cli.getOrdinals(doc, ordinals);
final int upto = ordinals.offset + ordinals.length;
float val = (float) fvalues.doubleVal(doc);
for (int i = ordinals.offset; i < upto; i++) {
aggValues[ordinals.ints[i]] += val;
}
++doc;
}
}
@Override
public boolean requiresDocScores() {
return false;
}
}
private final ValueSource valueSource;
private final boolean requiresDocScores;
/**
* Constructor which takes the {@link ValueSource} from which to read the
* documents' values. You can also specify whether the value source requires
* document scores (e.g. when an expression references the document's score).
*/
public SumValueSourceFacetRequest(CategoryPath path, int num, ValueSource valueSource, boolean requiresDocScores) {
super(path, num);
this.valueSource = valueSource;
this.requiresDocScores = requiresDocScores;
}
@Override
public FacetsAggregator createFacetsAggregator(FacetIndexingParams fip) {
if (requiresDocScores) {
return new ScoreValueSourceFacetsAggregator(valueSource);
} else {
return new NoScoreValueSourceFacetsAggregator(valueSource);
}
}
}
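A minimal usage sketch for the request added above, mirroring the test included in this commit. It assumes an already-open IndexSearcher ("searcher"), a TaxonomyReader ("taxoReader") and documents indexed with a NumericDocValuesField named "price"; these names are illustrative, not part of the commit:

  ValueSource price = new LongFieldSource("price");
  FacetSearchParams fsp = new FacetSearchParams(
      new SumValueSourceFacetRequest(new CategoryPath("a"), 10, price, false));
  FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);
  searcher.search(new MatchAllDocsQuery(), fc);
  List<FacetResult> results = fc.getFacetResults(); // each node's value is the sum of "price" over its docs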

View File

@@ -0,0 +1,185 @@
package org.apache.lucene.facet.search;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.FacetTestUtils;
import org.apache.lucene.facet.index.FacetFields;
import org.apache.lucene.facet.params.CategoryListParams;
import org.apache.lucene.facet.params.FacetIndexingParams;
import org.apache.lucene.facet.params.FacetSearchParams;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.junit.Test;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
public class TestSumValueSourceFacetRequest extends FacetTestCase {
@Test
public void testNoScore() throws Exception {
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
FacetFields facetFields = new FacetFields(taxonomyWriter);
for (int i = 0; i < 4; i++) {
Document doc = new Document();
doc.add(new NumericDocValuesField("price", (i+1)));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a", Integer.toString(i % 2))));
iw.addDocument(doc);
}
taxonomyWriter.close();
iw.close();
DirectoryReader r = DirectoryReader.open(indexDir);
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
ValueSource valueSource = new LongFieldSource("price");
FacetSearchParams fsp = new FacetSearchParams(new SumValueSourceFacetRequest(new CategoryPath("a"), 10, valueSource, false));
FacetsCollector fc = FacetsCollector.create(fsp, r, taxo);
newSearcher(r).search(new MatchAllDocsQuery(), fc);
List<FacetResult> res = fc.getFacetResults();
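// Docs 0..3 carry prices 1..4: a/0 holds docs 0 and 2 (1 + 3 = 4), a/1 holds docs 1 and 3 (2 + 4 = 6).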
assertEquals("a (0)\n 1 (6)\n 0 (4)\n", FacetTestUtils.toSimpleString(res.get(0)));
IOUtils.close(taxo, taxoDir, r, indexDir);
}
@Test
public void testWithScore() throws Exception {
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
FacetFields facetFields = new FacetFields(taxonomyWriter);
for (int i = 0; i < 4; i++) {
Document doc = new Document();
doc.add(new NumericDocValuesField("price", (i+1)));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a", Integer.toString(i % 2))));
iw.addDocument(doc);
}
taxonomyWriter.close();
iw.close();
DirectoryReader r = DirectoryReader.open(indexDir);
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
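// A ValueSource that returns each document's score, read from the "scorer" entry the aggregator puts in the context.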
ValueSource valueSource = new ValueSource() {
@Override
public FunctionValues getValues(@SuppressWarnings("rawtypes") Map context, AtomicReaderContext readerContext) throws IOException {
final Scorer scorer = (Scorer) context.get("scorer");
assert scorer != null;
return new DoubleDocValues(this) {
@Override
public double doubleVal(int document) {
try {
return scorer.score();
} catch (IOException exception) {
throw new RuntimeException(exception);
}
}
};
}
@Override public boolean equals(Object o) { return o == this; }
@Override public int hashCode() { return System.identityHashCode(this); }
@Override public String description() { return "score()"; }
};
FacetSearchParams fsp = new FacetSearchParams(new SumValueSourceFacetRequest(new CategoryPath("a"), 10, valueSource, true));
FacetsCollector fc = FacetsCollector.create(fsp, r, taxo);
TopScoreDocCollector tsdc = TopScoreDocCollector.create(10, true);
// score documents by their 'price' field - makes asserting the correct counts for the categories easier
Query q = new FunctionQuery(new LongFieldSource("price"));
newSearcher(r).search(q, MultiCollector.wrap(tsdc, fc));
List<FacetResult> res = fc.getFacetResults();
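// FunctionQuery scores each document by its "price", and the value source sums those scores,
// so the totals match the no-score case: a/1 = 2 + 4 = 6, a/0 = 1 + 3 = 4.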
assertEquals("a (0)\n 1 (6)\n 0 (4)\n", FacetTestUtils.toSimpleString(res.get(0)));
IOUtils.close(taxo, taxoDir, r, indexDir);
}
@Test
public void testRollupValues() throws Exception {
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
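// NO_PARENTS: parent ordinals are not encoded per document, so the aggregator must roll up
// values from the child categories into the dimension "a".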
FacetIndexingParams fip = new FacetIndexingParams(new CategoryListParams() {
@Override
public OrdinalPolicy getOrdinalPolicy(String dimension) {
return OrdinalPolicy.NO_PARENTS;
}
});
FacetFields facetFields = new FacetFields(taxonomyWriter, fip);
for (int i = 0; i < 4; i++) {
Document doc = new Document();
doc.add(new NumericDocValuesField("price", (i+1)));
facetFields.addFields(doc, Collections.singletonList(new CategoryPath("a", Integer.toString(i % 2), "1")));
iw.addDocument(doc);
}
taxonomyWriter.close();
iw.close();
DirectoryReader r = DirectoryReader.open(indexDir);
DirectoryTaxonomyReader taxo = new DirectoryTaxonomyReader(taxoDir);
ValueSource valueSource = new LongFieldSource("price");
FacetSearchParams fsp = new FacetSearchParams(fip, new SumValueSourceFacetRequest(new CategoryPath("a"), 10, valueSource, false));
FacetsCollector fc = FacetsCollector.create(fsp, r, taxo);
newSearcher(r).search(new MatchAllDocsQuery(), fc);
List<FacetResult> res = fc.getFacetResults();
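// Rolled up from the children: a = 6 + 4 = 10.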
assertEquals("a (10)\n 1 (6)\n 0 (4)\n", FacetTestUtils.toSimpleString(res.get(0)));
IOUtils.close(taxo, taxoDir, r, indexDir);
}
}