LUCENE-7610: Remove deprecated facet ValueSource methods

This commit is contained in:
Alan Woodward 2017-01-07 12:25:15 +00:00
parent e5f39f62f7
commit ce8b678ba1
7 changed files with 13 additions and 182 deletions

View File

@ -21,7 +21,6 @@ import java.util.Objects;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
@ -177,14 +176,6 @@ public final class DoubleRange extends Range {
} }
/**
* @deprecated Use {@link #getQuery(Query, DoubleValuesSource)}
*/
@Deprecated
public Query getQuery(final Query fastMatchQuery, final ValueSource valueSource) {
return new ValueSourceQuery(this, fastMatchQuery, valueSource.asDoubleValuesSource());
}
/** /**
* Create a Query that matches documents in this range * Create a Query that matches documents in this range
* *

View File

@ -25,7 +25,6 @@ import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs; import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValues;
import org.apache.lucene.search.DoubleValuesSource; import org.apache.lucene.search.DoubleValuesSource;
@ -60,15 +59,6 @@ public class DoubleRangeFacetCounts extends RangeFacetCounts {
this(field, DoubleValuesSource.fromDoubleField(field), hits, ranges); this(field, DoubleValuesSource.fromDoubleField(field), hits, ranges);
} }
/**
* Create {@code RangeFacetCounts}, using the provided {@link ValueSource}.
*
* @deprecated Use {@link #DoubleRangeFacetCounts(String, DoubleValuesSource, FacetsCollector, DoubleRange...)}
*/
public DoubleRangeFacetCounts(String field, ValueSource valueSource, FacetsCollector hits, DoubleRange... ranges) throws IOException {
this(field, valueSource, hits, null, ranges);
}
/** /**
* Create {@code RangeFacetCounts} using the provided {@link DoubleValuesSource} * Create {@code RangeFacetCounts} using the provided {@link DoubleValuesSource}
*/ */
@ -76,19 +66,6 @@ public class DoubleRangeFacetCounts extends RangeFacetCounts {
this(field, valueSource, hits, null, ranges); this(field, valueSource, hits, null, ranges);
} }
/**
* Create {@code RangeFacetCounts}, using the provided
* {@link ValueSource}, and using the provided Query as
* a fastmatch: only documents matching the query are
* checked for the matching ranges.
*
* @deprecated Use {@link #DoubleRangeFacetCounts(String, DoubleValuesSource, FacetsCollector, Query, DoubleRange...)}
*/
@Deprecated
public DoubleRangeFacetCounts(String field, ValueSource valueSource, FacetsCollector hits, Query fastMatchQuery, DoubleRange... ranges) throws IOException {
this(field, valueSource.asDoubleValuesSource(), hits, fastMatchQuery, ranges);
}
/** /**
* Create {@code RangeFacetCounts}, using the provided * Create {@code RangeFacetCounts}, using the provided
* {@link DoubleValuesSource}, and using the provided Query as * {@link DoubleValuesSource}, and using the provided Query as

View File

@ -21,7 +21,6 @@ import java.util.Objects;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
@ -169,15 +168,6 @@ public final class LongRange extends Range {
} }
/**
* @deprecated Use {@link #getQuery(Query, LongValuesSource)}
*/
@Deprecated
public Query getQuery(final Query fastMatchQuery, final ValueSource valueSource) {
return new ValueSourceQuery(this, fastMatchQuery, valueSource.asLongValuesSource());
}
/** /**
* Create a Query that matches documents in this range * Create a Query that matches documents in this range
* *

View File

@ -51,33 +51,12 @@ public class LongRangeFacetCounts extends RangeFacetCounts {
this(field, LongValuesSource.fromLongField(field), hits, ranges); this(field, LongValuesSource.fromLongField(field), hits, ranges);
} }
/**
* Create {@code RangeFacetCounts}, using the provided {@link ValueSource}.
*
* @deprecated Use {@link #LongRangeFacetCounts(String, LongValuesSource, FacetsCollector, LongRange...)}
*/
@Deprecated
public LongRangeFacetCounts(String field, ValueSource valueSource, FacetsCollector hits, LongRange... ranges) throws IOException {
this(field, valueSource.asLongValuesSource(), hits, null, ranges);
}
/** Create {@code RangeFacetCounts}, using the provided /** Create {@code RangeFacetCounts}, using the provided
* {@link LongValuesSource}. */ * {@link LongValuesSource}. */
public LongRangeFacetCounts(String field, LongValuesSource valueSource, FacetsCollector hits, LongRange... ranges) throws IOException { public LongRangeFacetCounts(String field, LongValuesSource valueSource, FacetsCollector hits, LongRange... ranges) throws IOException {
this(field, valueSource, hits, null, ranges); this(field, valueSource, hits, null, ranges);
} }
/**
* Create {@code RangeFacetCounts}, using the provided {@link ValueSource}.
*
* @deprecated Use {@link #LongRangeFacetCounts(String, LongValuesSource, FacetsCollector, Query, LongRange...)}
*/
@Deprecated
public LongRangeFacetCounts(String field, ValueSource valueSource, FacetsCollector hits, Query fastMatchQuery, LongRange... ranges) throws IOException {
this(field, valueSource.asLongValuesSource(), hits, fastMatchQuery, ranges);
}
/** Create {@code RangeFacetCounts}, using the provided /** Create {@code RangeFacetCounts}, using the provided
* {@link LongValuesSource}, and using the provided Filter as * {@link LongValuesSource}, and using the provided Filter as
* a fastmatch: only documents passing the filter are * a fastmatch: only documents passing the filter are

View File

@ -18,19 +18,14 @@ package org.apache.lucene.facet.taxonomy;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Map;
import org.apache.lucene.facet.FacetsCollector; import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsCollector.MatchingDocs; import org.apache.lucene.facet.FacetsCollector.MatchingDocs;
import org.apache.lucene.facet.FacetsConfig; import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValues;
import org.apache.lucene.search.DoubleValuesSource; import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
/** Aggregates sum of values from {@link /** Aggregates sum of values from {@link
@ -40,21 +35,6 @@ import org.apache.lucene.util.IntsRef;
public class TaxonomyFacetSumValueSource extends FloatTaxonomyFacets { public class TaxonomyFacetSumValueSource extends FloatTaxonomyFacets {
private final OrdinalsReader ordinalsReader; private final OrdinalsReader ordinalsReader;
/**
* Aggreggates double facet values from the provided
* {@link ValueSource}, pulling ordinals using {@link
* DocValuesOrdinalsReader} against the default indexed
* facet field {@link
* FacetsConfig#DEFAULT_INDEX_FIELD_NAME}.
*
* @deprecated Use {@link #TaxonomyFacetSumValueSource(TaxonomyReader, FacetsConfig, FacetsCollector, DoubleValuesSource)}
*/
@Deprecated
public TaxonomyFacetSumValueSource(TaxonomyReader taxoReader, FacetsConfig config,
FacetsCollector fc, ValueSource valueSource) throws IOException {
this(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME), taxoReader, config, fc, valueSource);
}
/** /**
* Aggregates double facet values from the provided * Aggregates double facet values from the provided
* {@link DoubleValuesSource}, pulling ordinals using {@link * {@link DoubleValuesSource}, pulling ordinals using {@link
@ -66,21 +46,6 @@ public class TaxonomyFacetSumValueSource extends FloatTaxonomyFacets {
this(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME), taxoReader, config, fc, valueSource); this(new DocValuesOrdinalsReader(FacetsConfig.DEFAULT_INDEX_FIELD_NAME), taxoReader, config, fc, valueSource);
} }
/**
* Aggregates float facet values from the provided
* {@link ValueSource}, and pulls ordinals from the
* provided {@link OrdinalsReader}.
*
* @deprecated Use {@link #TaxonomyFacetSumValueSource(OrdinalsReader, TaxonomyReader, FacetsConfig, FacetsCollector, DoubleValuesSource)}
*/
@Deprecated
public TaxonomyFacetSumValueSource(OrdinalsReader ordinalsReader, TaxonomyReader taxoReader,
FacetsConfig config, FacetsCollector fc, ValueSource valueSource) throws IOException {
super(ordinalsReader.getIndexFieldName(), taxoReader, config);
this.ordinalsReader = ordinalsReader;
sumValues(fc.getMatchingDocs(), fc.getKeepScores(), valueSource.asDoubleValuesSource());
}
/** /**
* Aggregates float facet values from the provided * Aggregates float facet values from the provided
* {@link DoubleValuesSource}, and pulls ordinals from the * {@link DoubleValuesSource}, and pulls ordinals from the
@ -135,40 +100,4 @@ public class TaxonomyFacetSumValueSource extends FloatTaxonomyFacets {
rollup(); rollup();
} }
/**
* {@link ValueSource} that returns the score for each
* hit; use this to aggregate the sum of all hit scores
* for each facet label.
*
* @deprecated Use {@link DoubleValuesSource#SCORES}
*/
public static class ScoreValueSource extends ValueSource {
/** Sole constructor. */
public ScoreValueSource() {
}
@Override
public FunctionValues getValues(@SuppressWarnings("rawtypes") Map context, LeafReaderContext readerContext) throws IOException {
final Scorer scorer = (Scorer) context.get("scorer");
if (scorer == null) {
throw new IllegalStateException("scores are missing; be sure to pass keepScores=true to FacetsCollector");
}
return new DoubleDocValues(this) {
@Override
public double doubleVal(int document) {
try {
return scorer.score();
} catch (IOException exception) {
throw new RuntimeException(exception);
}
}
};
}
@Override public boolean equals(Object o) { return o == this; }
@Override public int hashCode() { return System.identityHashCode(this); }
@Override public String description() { return "score()"; }
}
} }

View File

@ -46,13 +46,11 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.DoubleFieldSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValues;
import org.apache.lucene.search.DoubleValuesSource; import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LongValuesSource;
import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Scorer;
@ -437,7 +435,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
} else { } else {
fastMatchQuery = null; fastMatchQuery = null;
} }
ValueSource vs = new LongFieldSource("field"); LongValuesSource vs = LongValuesSource.fromLongField("field");
Facets facets = new LongRangeFacetCounts("field", vs, sfc, fastMatchQuery, ranges); Facets facets = new LongRangeFacetCounts("field", vs, sfc, fastMatchQuery, ranges);
FacetResult result = facets.getTopChildren(10, "field"); FacetResult result = facets.getTopChildren(10, "field");
assertEquals(numRange, result.labelValues.length); assertEquals(numRange, result.labelValues.length);
@ -580,7 +578,7 @@ public class TestRangeFacetCounts extends FacetTestCase {
} else { } else {
fastMatchFilter = null; fastMatchFilter = null;
} }
ValueSource vs = new DoubleFieldSource("field"); DoubleValuesSource vs = DoubleValuesSource.fromDoubleField("field");
Facets facets = new DoubleRangeFacetCounts("field", vs, sfc, fastMatchFilter, ranges); Facets facets = new DoubleRangeFacetCounts("field", vs, sfc, fastMatchFilter, ranges);
FacetResult result = facets.getTopChildren(10, "field"); FacetResult result = facets.getTopChildren(10, "field");
assertEquals(numRange, result.labelValues.length); assertEquals(numRange, result.labelValues.length);

View File

@ -16,7 +16,6 @@
*/ */
package org.apache.lucene.facet.taxonomy; package org.apache.lucene.facet.taxonomy;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -37,18 +36,12 @@ import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.LabelAndValue; import org.apache.lucene.facet.LabelAndValue;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader; import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter; import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.FunctionQuery; import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.queries.function.valuesource.FloatFieldSource;
import org.apache.lucene.queries.function.valuesource.IntFieldSource;
import org.apache.lucene.queries.function.valuesource.LongFieldSource; import org.apache.lucene.queries.function.valuesource.LongFieldSource;
import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.ConstantScoreQuery;
@ -56,7 +49,6 @@ import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
@ -121,7 +113,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
// Facets.search utility methods: // Facets.search utility methods:
searcher.search(new MatchAllDocsQuery(), c); searcher.search(new MatchAllDocsQuery(), c);
TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, new IntFieldSource("num")); TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, DoubleValuesSource.fromIntField("num"));
// Retrieve & verify results: // Retrieve & verify results:
assertEquals("dim=Author path=[] value=145.0 childCount=4\n Lisa (50.0)\n Frank (45.0)\n Susan (40.0)\n Bob (10.0)\n", facets.getTopChildren(10, "Author").toString()); assertEquals("dim=Author path=[] value=145.0 childCount=4\n Lisa (50.0)\n Frank (45.0)\n Susan (40.0)\n Bob (10.0)\n", facets.getTopChildren(10, "Author").toString());
@ -181,7 +173,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
FacetsCollector c = new FacetsCollector(); FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c); searcher.search(new MatchAllDocsQuery(), c);
TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, new IntFieldSource("num")); TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, new FacetsConfig(), c, DoubleValuesSource.fromIntField("num"));
// Ask for top 10 labels for any dims that have counts: // Ask for top 10 labels for any dims that have counts:
List<FacetResult> results = facets.getAllDims(10); List<FacetResult> results = facets.getAllDims(10);
@ -224,7 +216,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
FacetsCollector c = new FacetsCollector(); FacetsCollector c = new FacetsCollector();
searcher.search(new MatchAllDocsQuery(), c); searcher.search(new MatchAllDocsQuery(), c);
TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, config, c, new IntFieldSource("num")); TaxonomyFacetSumValueSource facets = new TaxonomyFacetSumValueSource(taxoReader, config, c, DoubleValuesSource.fromIntField("num"));
// Ask for top 10 labels for any dims that have counts: // Ask for top 10 labels for any dims that have counts:
List<FacetResult> results = facets.getAllDims(10); List<FacetResult> results = facets.getAllDims(10);
@ -295,7 +287,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
FacetsCollector sfc = new FacetsCollector(); FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc); newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, new LongFieldSource("price")); Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, DoubleValuesSource.fromLongField("price"));
assertEquals("dim=a path=[] value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString()); assertEquals("dim=a path=[] value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString());
iw.close(); iw.close();
@ -320,33 +312,11 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw); DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter); DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
ValueSource valueSource = new ValueSource() {
@Override
public FunctionValues getValues(@SuppressWarnings("rawtypes") Map context, LeafReaderContext readerContext) throws IOException {
final Scorer scorer = (Scorer) context.get("scorer");
assert scorer != null;
return new DoubleDocValues(this) {
@Override
public double doubleVal(int document) {
try {
return scorer.score();
} catch (IOException exception) {
throw new RuntimeException(exception);
}
}
};
}
@Override public boolean equals(Object o) { return o == this; }
@Override public int hashCode() { return System.identityHashCode(this); }
@Override public String description() { return "score()"; }
};
FacetsCollector fc = new FacetsCollector(true); FacetsCollector fc = new FacetsCollector(true);
// score documents by their 'price' field - makes asserting the correct counts for the categories easier // score documents by their 'price' field - makes asserting the correct counts for the categories easier
Query q = new FunctionQuery(new LongFieldSource("price")); Query q = new FunctionQuery(new LongFieldSource("price"));
FacetsCollector.search(newSearcher(r), q, 10, fc); FacetsCollector.search(newSearcher(r), q, 10, fc);
Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, valueSource); Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, DoubleValuesSource.SCORES);
assertEquals("dim=a path=[] value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString()); assertEquals("dim=a path=[] value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString());
@ -374,10 +344,9 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
DirectoryReader r = DirectoryReader.open(iw); DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter); DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
ValueSource valueSource = new LongFieldSource("price");
FacetsCollector sfc = new FacetsCollector(); FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc); newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, valueSource); Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, sfc, DoubleValuesSource.fromLongField("price"));
assertEquals("dim=a path=[] value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString()); assertEquals("dim=a path=[] value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString());
@ -447,8 +416,6 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
// NRT open // NRT open
TaxonomyReader tr = new DirectoryTaxonomyReader(tw); TaxonomyReader tr = new DirectoryTaxonomyReader(tw);
ValueSource values = new FloatFieldSource("value");
int iters = atLeast(100); int iters = atLeast(100);
for(int iter=0;iter<iters;iter++) { for(int iter=0;iter<iters;iter++) {
String searchToken = tokens[random().nextInt(tokens.length)]; String searchToken = tokens[random().nextInt(tokens.length)];
@ -457,7 +424,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
} }
FacetsCollector fc = new FacetsCollector(); FacetsCollector fc = new FacetsCollector();
FacetsCollector.search(searcher, new TermQuery(new Term("content", searchToken)), 10, fc); FacetsCollector.search(searcher, new TermQuery(new Term("content", searchToken)), 10, fc);
Facets facets = new TaxonomyFacetSumValueSource(tr, config, fc, values); Facets facets = new TaxonomyFacetSumValueSource(tr, config, fc, DoubleValuesSource.fromFloatField("value"));
// Slow, yet hopefully bug-free, faceting: // Slow, yet hopefully bug-free, faceting:
@SuppressWarnings({"rawtypes","unchecked"}) Map<String,Float>[] expectedValues = new HashMap[numDims]; @SuppressWarnings({"rawtypes","unchecked"}) Map<String,Float>[] expectedValues = new HashMap[numDims];
@ -473,7 +440,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase {
if (v == null) { if (v == null) {
expectedValues[j].put(doc.dims[j], doc.value); expectedValues[j].put(doc.dims[j], doc.value);
} else { } else {
expectedValues[j].put(doc.dims[j], v.floatValue() + doc.value); expectedValues[j].put(doc.dims[j], v + doc.value);
} }
} }
} }