LUCENE-5155: add OrdinalValueResolver

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1509152 13f79535-47bb-0310-9956-ffa450edef68
Author: Shai Erera
Date:   2013-08-01 10:01:38 +00:00
Parent: e5045d5538
Commit: 96c161f11f

30 changed files with 250 additions and 364 deletions

CHANGES.txt

@@ -86,7 +86,11 @@ New features
 * LUCENE-5153: AnalyzerWrapper.wrapReader allows wrapping the Reader given to
   inputReader. (Shai Erera)
+* LUCENE-5155: FacetRequest.getValueOf and .getFacetArraysSource replaced by
+  FacetsAggregator.createOrdinalValueResolver. This gives FacetsAggregators
+  better options for resolving an ordinal's value. (Shai Erera)
 Bug Fixes
 * LUCENE-5116: IndexWriter.addIndexes(IndexReader...) should drop empty (or all
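In short, value resolution moves from the FacetRequest to the FacetsAggregator. A minimal sketch of the old versus new call pattern (editorial illustration, not part of the patch; the OrdinalValueExample class and resolve method are made-up names, and the aggregator, request, arrays and ordinal arguments are assumed to come from existing code):

import org.apache.lucene.facet.search.FacetArrays;
import org.apache.lucene.facet.search.FacetRequest;
import org.apache.lucene.facet.search.FacetsAggregator;
import org.apache.lucene.facet.search.OrdinalValueResolver;

/** Sketch only: how an ordinal's value is resolved after LUCENE-5155. */
class OrdinalValueExample {

  // Before this change, the request resolved values itself:
  //   double value = request.getValueOf(arrays, ordinal);   // removed API

  static double resolve(FacetsAggregator aggregator, FacetRequest request,
                        FacetArrays arrays, int ordinal) {
    // After: ask the aggregator for a resolver bound to the arrays, then reuse it.
    OrdinalValueResolver resolver = aggregator.createOrdinalValueResolver(request, arrays);
    return resolver.valueOf(ordinal);
  }
}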

SumFloatAssociationFacetRequest.java

@@ -1,7 +1,6 @@
 package org.apache.lucene.facet.associations;
 import org.apache.lucene.facet.params.FacetIndexingParams;
-import org.apache.lucene.facet.search.FacetArrays;
 import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetsAggregator;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
@@ -44,14 +43,4 @@ public class SumFloatAssociationFacetRequest extends FacetRequest {
     return new SumFloatAssociationFacetsAggregator();
   }
-  @Override
-  public double getValueOf(FacetArrays arrays, int ordinal) {
-    return arrays.getFloatArray()[ordinal];
-  }
-  @Override
-  public FacetArraysSource getFacetArraysSource() {
-    return FacetArraysSource.FLOAT;
-  }
 }

SumFloatAssociationFacetsAggregator.java

@@ -7,6 +7,8 @@ import org.apache.lucene.facet.search.FacetArrays;
 import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetsAggregator;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.search.OrdinalValueResolver;
+import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.util.BytesRef;
@@ -81,5 +83,10 @@ public class SumFloatAssociationFacetsAggregator implements FacetsAggregator {
   public void rollupValues(FacetRequest fr, int ordinal, int[] children, int[] siblings, FacetArrays facetArrays) {
     // NO-OP: this aggregator does no rollup values to the parents.
   }
+  @Override
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays) {
+    return new FloatValueResolver(arrays);
+  }
 }

SumIntAssociationFacetRequest.java

@@ -1,7 +1,6 @@
 package org.apache.lucene.facet.associations;
 import org.apache.lucene.facet.params.FacetIndexingParams;
-import org.apache.lucene.facet.search.FacetArrays;
 import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetsAggregator;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
@@ -44,14 +43,4 @@ public class SumIntAssociationFacetRequest extends FacetRequest {
     return new SumIntAssociationFacetsAggregator();
   }
-  @Override
-  public FacetArraysSource getFacetArraysSource() {
-    return FacetArraysSource.INT;
-  }
-  @Override
-  public double getValueOf(FacetArrays arrays, int ordinal) {
-    return arrays.getIntArray()[ordinal];
-  }
 }

SumIntAssociationFacetsAggregator.java

@@ -7,6 +7,8 @@ import org.apache.lucene.facet.search.FacetArrays;
 import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetsAggregator;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.search.OrdinalValueResolver;
+import org.apache.lucene.facet.search.OrdinalValueResolver.IntValueResolver;
 import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.util.BytesRef;
@@ -81,4 +83,9 @@ public class SumIntAssociationFacetsAggregator implements FacetsAggregator {
     // NO-OP: this aggregator does no rollup values to the parents.
   }
+  @Override
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays) {
+    return new IntValueResolver(arrays);
+  }
 }

OldFacetsAccumulator.java

@@ -24,6 +24,9 @@ import org.apache.lucene.facet.search.FacetResult;
 import org.apache.lucene.facet.search.FacetsAccumulator;
 import org.apache.lucene.facet.search.FacetsAggregator;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.search.OrdinalValueResolver;
+import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
+import org.apache.lucene.facet.search.OrdinalValueResolver.IntValueResolver;
 import org.apache.lucene.facet.search.SumScoreFacetRequest;
 import org.apache.lucene.facet.search.TaxonomyFacetsAccumulator;
 import org.apache.lucene.facet.search.TopKFacetResultsHandler;
@@ -172,7 +175,7 @@ public class OldFacetsAccumulator extends TaxonomyFacetsAccumulator {
       for (FacetRequest fr : searchParams.facetRequests) {
         // Handle and merge only facet requests which were not already handled.
         if (handledRequests.add(fr)) {
-          PartitionsFacetResultsHandler frHndlr = createFacetResultsHandler(fr);
+          PartitionsFacetResultsHandler frHndlr = createFacetResultsHandler(fr, createOrdinalValueResolver(fr));
           IntermediateFacetResult res4fr = frHndlr.fetchPartitionResult(offset);
           IntermediateFacetResult oldRes = fr2tmpRes.get(fr);
           if (oldRes != null) {
@@ -189,7 +192,7 @@ public class OldFacetsAccumulator extends TaxonomyFacetsAccumulator {
     // gather results from all requests into a list for returning them
     List<FacetResult> res = new ArrayList<FacetResult>();
     for (FacetRequest fr : searchParams.facetRequests) {
-      PartitionsFacetResultsHandler frHndlr = createFacetResultsHandler(fr);
+      PartitionsFacetResultsHandler frHndlr = createFacetResultsHandler(fr, createOrdinalValueResolver(fr));
       IntermediateFacetResult tmpResult = fr2tmpRes.get(fr);
       if (tmpResult == null) {
         // Add empty FacetResult:
@@ -217,11 +220,11 @@ public class OldFacetsAccumulator extends TaxonomyFacetsAccumulator {
   }
   @Override
-  public PartitionsFacetResultsHandler createFacetResultsHandler(FacetRequest fr) {
+  public PartitionsFacetResultsHandler createFacetResultsHandler(FacetRequest fr, OrdinalValueResolver resolver) {
     if (fr.getResultMode() == ResultMode.PER_NODE_IN_TREE) {
-      return new TopKInEachNodeHandler(taxonomyReader, fr, facetArrays);
+      return new TopKInEachNodeHandler(taxonomyReader, fr, resolver, facetArrays);
     } else {
-      return new TopKFacetResultsHandler(taxonomyReader, fr, facetArrays);
+      return new TopKFacetResultsHandler(taxonomyReader, fr, resolver, facetArrays);
     }
   }
@@ -246,6 +249,24 @@ public class OldFacetsAccumulator extends TaxonomyFacetsAccumulator {
     return mayComplement() && (docids.size() > indexReader.numDocs() * getComplementThreshold()) ;
   }
+  /**
+   * Creates an {@link OrdinalValueResolver} for the given {@link FacetRequest}.
+   * By default this method supports {@link CountFacetRequest} and
+   * {@link SumScoreFacetRequest}. You should override if you are using other
+   * requests with this accumulator.
+   */
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest fr) {
+    if (fr instanceof CountFacetRequest) {
+      return new IntValueResolver(facetArrays);
+    } else if (fr instanceof SumScoreFacetRequest) {
+      return new FloatValueResolver(facetArrays);
+    } else if (fr instanceof OverSampledFacetRequest) {
+      return createOrdinalValueResolver(((OverSampledFacetRequest) fr).orig);
+    } else {
+      throw new IllegalArgumentException("unrecognized FacetRequest " + fr.getClass());
+    }
+  }
   /**
    * Iterate over the documents for this partition and fill the facet arrays with the correct
    * count/complement count/value.
@@ -344,7 +365,7 @@ public class OldFacetsAccumulator extends TaxonomyFacetsAccumulator {
       }
     } else if (fr instanceof SumScoreFacetRequest) {
       if (isUsingComplements) {
-        throw new IllegalArgumentException("complements are not supported by this SumScoreFacetRequest");
+        throw new IllegalArgumentException("complements are not supported by SumScoreFacetRequest");
       } else {
        return new ScoringAggregator(facetArrays.getFloatArray());
      }

PartitionsFacetResultsHandler.java

@@ -9,6 +9,7 @@ import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetResult;
 import org.apache.lucene.facet.search.FacetResultNode;
 import org.apache.lucene.facet.search.FacetResultsHandler;
+import org.apache.lucene.facet.search.OrdinalValueResolver;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 /*
@@ -36,11 +37,10 @@ import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 public abstract class PartitionsFacetResultsHandler extends FacetResultsHandler {
   public PartitionsFacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest,
-      FacetArrays facetArrays) {
-    super(taxonomyReader, facetRequest, facetArrays);
+      OrdinalValueResolver resolver, FacetArrays facetArrays) {
+    super(taxonomyReader, facetRequest, resolver, facetArrays);
   }
   /**
    * Fetch results of a single partition, given facet arrays for that partition,
    * and based on the matching documents and faceted search parameters.

RangeFacetRequest.java

@@ -20,7 +20,6 @@ package org.apache.lucene.facet.range;
 import java.util.List;
 import org.apache.lucene.facet.params.FacetIndexingParams;
-import org.apache.lucene.facet.search.FacetArrays;
 import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetsAggregator;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
@@ -52,14 +51,4 @@ public class RangeFacetRequest<T extends Range> extends FacetRequest {
     return null;
   }
-  @Override
-  public double getValueOf(FacetArrays arrays, int ordinal) {
-    throw new UnsupportedOperationException();
-  }
-  @Override
-  public FacetArraysSource getFacetArraysSource() {
-    throw new UnsupportedOperationException();
-  }
 }

Sampler.java

@@ -7,7 +7,6 @@ import java.util.List;
 import org.apache.lucene.facet.old.ScoredDocIDs;
 import org.apache.lucene.facet.params.FacetIndexingParams;
 import org.apache.lucene.facet.params.FacetSearchParams;
-import org.apache.lucene.facet.search.FacetArrays;
 import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetResult;
 import org.apache.lucene.facet.search.FacetResultNode;
@@ -212,16 +211,6 @@ public abstract class Sampler {
     public FacetsAggregator createFacetsAggregator(FacetIndexingParams fip) {
       return orig.createFacetsAggregator(fip);
     }
-    @Override
-    public FacetArraysSource getFacetArraysSource() {
-      return orig.getFacetArraysSource();
-    }
-    @Override
-    public double getValueOf(FacetArrays arrays, int idx) {
-      return orig.getValueOf(arrays, idx);
-    }
   }
 }

SamplingAccumulator.java

@@ -10,6 +10,7 @@ import org.apache.lucene.facet.params.FacetSearchParams;
 import org.apache.lucene.facet.partitions.PartitionsFacetResultsHandler;
 import org.apache.lucene.facet.sampling.Sampler.SampleResult;
 import org.apache.lucene.facet.search.FacetArrays;
+import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetResult;
 import org.apache.lucene.facet.search.FacetsAccumulator;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
@@ -90,7 +91,8 @@ public class SamplingAccumulator extends OldFacetsAccumulator {
     List<FacetResult> results = new ArrayList<FacetResult>();
     for (FacetResult fres : sampleRes) {
       // for sure fres is not null because this is guaranteed by the delegee.
-      PartitionsFacetResultsHandler frh = createFacetResultsHandler(fres.getFacetRequest());
+      FacetRequest fr = fres.getFacetRequest();
+      PartitionsFacetResultsHandler frh = createFacetResultsHandler(fr, createOrdinalValueResolver(fr));
       if (samplerFixer != null) {
         // fix the result of current request
         samplerFixer.fixResult(docids, fres, samplingRatio);
@@ -106,7 +108,7 @@ public class SamplingAccumulator extends OldFacetsAccumulator {
       // final labeling if allowed (because labeling is a costly operation)
       if (fres.getFacetResultNode().ordinal == TaxonomyReader.INVALID_ORDINAL) {
         // category does not exist, add an empty result
-        results.add(emptyResult(fres.getFacetResultNode().ordinal, fres.getFacetRequest()));
+        results.add(emptyResult(fres.getFacetResultNode().ordinal, fr));
       } else {
         frh.labelResult(fres);
         results.add(fres);

SamplingWrapper.java

@@ -9,6 +9,7 @@ import org.apache.lucene.facet.old.ScoredDocIDs;
 import org.apache.lucene.facet.params.FacetSearchParams;
 import org.apache.lucene.facet.partitions.PartitionsFacetResultsHandler;
 import org.apache.lucene.facet.sampling.Sampler.SampleResult;
+import org.apache.lucene.facet.search.FacetRequest;
 import org.apache.lucene.facet.search.FacetResult;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
@@ -68,7 +69,8 @@ public class SamplingWrapper extends OldFacetsAccumulator {
     for (FacetResult fres : sampleRes) {
       // for sure fres is not null because this is guaranteed by the delegee.
-      PartitionsFacetResultsHandler frh = createFacetResultsHandler(fres.getFacetRequest());
+      FacetRequest fr = fres.getFacetRequest();
+      PartitionsFacetResultsHandler frh = createFacetResultsHandler(fr, createOrdinalValueResolver(fr));
       if (sampleFixer != null) {
         // fix the result of current request
         sampleFixer.fixResult(docids, fres, sampleSet.actualSampleRatio);
@@ -83,7 +85,7 @@ public class SamplingWrapper extends OldFacetsAccumulator {
       // final labeling if allowed (because labeling is a costly operation)
       if (fres.getFacetResultNode().ordinal == TaxonomyReader.INVALID_ORDINAL) {
         // category does not exist, add an empty result
-        results.add(emptyResult(fres.getFacetResultNode().ordinal, fres.getFacetRequest()));
+        results.add(emptyResult(fres.getFacetResultNode().ordinal, fr));
       } else {
         frh.labelResult(fres);
         results.add(fres);

CountFacetRequest.java

@@ -36,14 +36,4 @@ public class CountFacetRequest extends FacetRequest {
     return CountingFacetsAggregator.create(fip.getCategoryListParams(categoryPath));
   }
-  @Override
-  public double getValueOf(FacetArrays arrays, int ordinal) {
-    return arrays.getIntArray()[ordinal];
-  }
-  @Override
-  public FacetArraysSource getFacetArraysSource() {
-    return FacetArraysSource.INT;
-  }
 }

DepthOneFacetResultsHandler.java

@@ -9,6 +9,7 @@ import java.util.Comparator;
 import org.apache.lucene.facet.search.FacetRequest.SortOrder;
 import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
+import org.apache.lucene.util.CollectionUtil;
 import org.apache.lucene.util.PriorityQueue;
 /*
@@ -31,12 +32,11 @@ import org.apache.lucene.util.PriorityQueue;
 /**
  * A {@link FacetResultsHandler} which counts the top-K facets at depth 1 only
  * and always labels all result categories. The results are always sorted by
- * value, in descending order. Sub-classes are responsible to pull the values
- * from the corresponding {@link FacetArrays}.
+ * value, in descending order.
  *
  * @lucene.experimental
  */
-public abstract class DepthOneFacetResultsHandler extends FacetResultsHandler {
+public class DepthOneFacetResultsHandler extends FacetResultsHandler {
   private static class FacetResultNodeQueue extends PriorityQueue<FacetResultNode> {
@@ -51,40 +51,19 @@ public abstract class DepthOneFacetResultsHandler extends FacetResultsHandler {
     @Override
     protected boolean lessThan(FacetResultNode a, FacetResultNode b) {
-      if (a.value < b.value) return true;
-      if (a.value > b.value) return false;
-      // both have the same value, break tie by ordinal
-      return a.ordinal < b.ordinal;
+      return a.compareTo(b) < 0;
     }
   }
-  public DepthOneFacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, FacetArrays facetArrays) {
-    super(taxonomyReader, facetRequest, facetArrays);
+  public DepthOneFacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, FacetArrays facetArrays,
+      OrdinalValueResolver resolver) {
+    super(taxonomyReader, facetRequest, resolver, facetArrays);
     assert facetRequest.getDepth() == 1 : "this handler only computes the top-K facets at depth 1";
     assert facetRequest.numResults == facetRequest.getNumLabel() : "this handler always labels all top-K results";
    assert facetRequest.getSortOrder() == SortOrder.DESCENDING : "this handler always sorts results in descending order";
   }
-  /** Returnt the value of the requested ordinal. Called once for the result root. */
-  protected abstract double valueOf(int ordinal);
-  /**
-   * Add the siblings of {@code ordinal} to the given list. This is called
-   * whenever the number of results is too high (&gt; taxonomy size), instead of
-   * adding them to a {@link PriorityQueue}.
-   */
-  protected abstract void addSiblings(int ordinal, int[] siblings, ArrayList<FacetResultNode> nodes) throws IOException;
-  /**
-   * Add the siblings of {@code ordinal} to the given {@link PriorityQueue}. The
-   * given {@link PriorityQueue} is already filled with sentinel objects, so
-   * implementations are encouraged to use {@link PriorityQueue#top()} and
-   * {@link PriorityQueue#updateTop()} for best performance. Returns the total
-   * number of siblings.
-   */
-  protected abstract int addSiblings(int ordinal, int[] siblings, PriorityQueue<FacetResultNode> pq);
   @Override
   public final FacetResult compute() throws IOException {
     ParallelTaxonomyArrays arrays = taxonomyReader.getParallelTaxonomyArrays();
@@ -93,23 +72,28 @@ public abstract class DepthOneFacetResultsHandler extends FacetResultsHandler {
     int rootOrd = taxonomyReader.getOrdinal(facetRequest.categoryPath);
-    FacetResultNode root = new FacetResultNode(rootOrd, valueOf(rootOrd));
+    FacetResultNode root = new FacetResultNode(rootOrd, resolver.valueOf(rootOrd));
     root.label = facetRequest.categoryPath;
     if (facetRequest.numResults > taxonomyReader.getSize()) {
       // specialize this case, user is interested in all available results
       ArrayList<FacetResultNode> nodes = new ArrayList<FacetResultNode>();
-      int child = children[rootOrd];
-      addSiblings(child, siblings, nodes);
-      Collections.sort(nodes, new Comparator<FacetResultNode>() {
+      int ordinal = children[rootOrd];
+      while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
+        double value = resolver.valueOf(ordinal);
+        if (value > 0) {
+          FacetResultNode node = new FacetResultNode(ordinal, value);
+          node.label = taxonomyReader.getPath(ordinal);
+          nodes.add(node);
+        }
+        ordinal = siblings[ordinal];
+      }
+      CollectionUtil.introSort(nodes, Collections.reverseOrder(new Comparator<FacetResultNode>() {
         @Override
         public int compare(FacetResultNode o1, FacetResultNode o2) {
-          int value = (int) (o2.value - o1.value);
-          if (value == 0) {
-            value = o2.ordinal - o1.ordinal;
-          }
-          return value;
+          return o1.compareTo(o2);
         }
-      });
+      }));
       root.subResults = nodes;
       return new FacetResult(facetRequest, root, nodes.size());
@@ -117,7 +101,21 @@ public abstract class DepthOneFacetResultsHandler extends FacetResultsHandler {
     // since we use sentinel objects, we cannot reuse PQ. but that's ok because it's not big
     PriorityQueue<FacetResultNode> pq = new FacetResultNodeQueue(facetRequest.numResults, true);
-    int numSiblings = addSiblings(children[rootOrd], siblings, pq);
+    int ordinal = children[rootOrd];
+    FacetResultNode top = pq.top();
+    int numSiblings = 0;
+    while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
+      double value = resolver.valueOf(ordinal);
+      if (value > 0) {
+        ++numSiblings;
+        if (value > top.value) {
+          top.value = value;
+          top.ordinal = ordinal;
+          top = pq.updateTop();
+        }
+      }
+      ordinal = siblings[ordinal];
+    }
     // pop() the least (sentinel) elements
     int pqsize = pq.size();

FacetRequest.java

@@ -56,23 +56,6 @@ public abstract class FacetRequest {
     GLOBAL_FLAT
   }
-  /**
-   * Specifies which array of {@link FacetArrays} should be used to resolve
-   * values. When set to {@link #INT} or {@link #FLOAT}, allows creating an
-   * optimized {@link FacetResultsHandler}, which does not call
-   * {@link FacetRequest#getValueOf(FacetArrays, int)} for every ordinal.
-   * <p>
-   * If set to {@link #BOTH}, the {@link FacetResultsHandler} will use
-   * {@link FacetRequest#getValueOf(FacetArrays, int)} to resolve ordinal
-   * values, although it is recommended that you consider writing a specialized
-   * {@link FacetResultsHandler}.
-   * <p>
-   * Can also be set to {@link #NONE}, to indicate that this
-   * {@link FacetRequest} does not use {@link FacetArrays} to aggregate its
-   * result categories. Such requests won't use {@link FacetResultsHandler}.
-   */
-  public enum FacetArraysSource { INT, FLOAT, BOTH, NONE }
   /**
    * Defines which categories to return. If {@link #DESCENDING} (the default),
    * the highest {@link FacetRequest#numResults} weighted categories will be
@@ -159,12 +142,6 @@ public abstract class FacetRequest {
     return depth;
   }
-  /**
-   * Returns the {@link FacetArraysSource} this request uses in
-   * {@link #getValueOf(FacetArrays, int)}.
-   */
-  public abstract FacetArraysSource getFacetArraysSource();
   /**
    * Allows to specify the number of categories to label. By default all
    * returned categories are labeled.
@@ -187,24 +164,6 @@ public abstract class FacetRequest {
     return sortOrder;
   }
-  /**
-   * Return the weight of the requested category ordinal. A {@link FacetRequest}
-   * is responsible for resolving the weight of a category given the
-   * {@link FacetArrays} and {@link #getFacetArraysSource()}. E.g. a counting
-   * request will probably return the value of the category from
-   * {@link FacetArrays#getIntArray()} while an average-weighting request will
-   * compute the value using both arrays.
-   *
-   * @param arrays
-   *          the arrays used to aggregate the categories weights.
-   * @param ordinal
-   *          the category ordinal for which to return the weight.
-   */
-  // TODO perhaps instead of getValueOf we can have a postProcess(FacetArrays)
-  // That, together with getFacetArraysSource should allow ResultHandlers to
-  // efficiently obtain the values from the arrays directly
-  public abstract double getValueOf(FacetArrays arrays, int ordinal);
   @Override
   public int hashCode() {
     return hashCode;

FacetResult.java

@@ -97,6 +97,11 @@ public class FacetResult {
     Map<CategoryPath, FacetResultNode> mergedNodes = new HashMap<CategoryPath,FacetResultNode>();
     FacetArrays arrays = dimArrays != null ? dimArrays.get(frs.get(0).getFacetRequest().categoryPath.components[0]) : null;
     for (FacetResult fr : frs) {
+      FacetRequest freq = fr.getFacetRequest();
+      OrdinalValueResolver resolver = null;
+      if (arrays != null) {
+        resolver = freq.createFacetsAggregator(FacetIndexingParams.DEFAULT).createOrdinalValueResolver(freq, arrays);
+      }
       FacetResultNode frn = fr.getFacetResultNode();
       FacetResultNode merged = mergedNodes.get(frn.label);
       if (merged == null) {
@@ -105,7 +110,10 @@ public class FacetResult {
         FacetResultNode parentNode = null;
         while (parent.length > 0 && (parentNode = mergedNodes.get(parent)) == null) {
           int parentOrd = taxoReader.getOrdinal(parent);
-          double parentValue = arrays != null ? fr.getFacetRequest().getValueOf(arrays, parentOrd) : -1;
+          double parentValue = -1;
+          if (arrays != null) {
+            parentValue = resolver.valueOf(parentOrd);
+          }
           parentNode = new FacetResultNode(parentOrd, parentValue);
           parentNode.label = parent;
           parentNode.subResults = new ArrayList<FacetResultNode>();
@@ -157,16 +165,6 @@ public class FacetResult {
       public FacetsAggregator createFacetsAggregator(FacetIndexingParams fip) {
        throw new UnsupportedOperationException("not supported by this request");
       }
-      @Override
-      public double getValueOf(FacetArrays arrays, int idx) {
-        throw new UnsupportedOperationException("not supported by this request");
-      }
-      @Override
-      public FacetArraysSource getFacetArraysSource() {
-        throw new UnsupportedOperationException("not supported by this request");
-      }
     };
     mergedResult = new FacetResult(dummy, mergedNodes.get(min), -1);
   }

FacetResultNode.java

@@ -28,10 +28,13 @@ import org.apache.lucene.facet.taxonomy.TaxonomyReader;
  * Result of faceted search for a certain taxonomy node. This class serves as a
  * bin of different attributes of the result node, such as its {@link #ordinal}
  * as well as {@link #label}. You are not expected to modify those values.
+ * <p>
+ * This class implements {@link Comparable} for easy comparisons of result
+ * nodes, e.g. when sorting or computing top-K nodes.
  *
  * @lucene.experimental
  */
-public class FacetResultNode {
+public class FacetResultNode implements Comparable<FacetResultNode> {
   public static final List<FacetResultNode> EMPTY_SUB_RESULTS = Collections.emptyList();
@@ -71,6 +74,15 @@ public class FacetResultNode {
     this.ordinal = ordinal;
     this.value = value;
   }
+  @Override
+  public int compareTo(FacetResultNode o) {
+    int res = Double.compare(value, o.value);
+    if (res == 0) {
+      res = ordinal - o.ordinal;
+    }
+    return res;
+  }
   @Override
   public String toString() {
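Since FacetResultNode is now Comparable, callers can order result nodes without writing a Comparator. A minimal sketch (editorial illustration, not part of the patch; the SortNodesExample class and the nodes argument are made-up names):

import java.util.Collections;
import java.util.List;

import org.apache.lucene.facet.search.FacetResultNode;

/** Sketch only: sorting FacetResultNodes by descending value via compareTo. */
class SortNodesExample {
  static void sortDescending(List<FacetResultNode> nodes) {
    // compareTo() orders by value and breaks ties by ordinal, so reverseOrder()
    // puts the highest-valued nodes first.
    Collections.sort(nodes, Collections.<FacetResultNode>reverseOrder());
  }
}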

FacetResultsHandler.java

@@ -29,15 +29,17 @@ import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 public abstract class FacetResultsHandler {
   public final TaxonomyReader taxonomyReader;
   public final FacetRequest facetRequest;
+  protected final OrdinalValueResolver resolver;
   protected final FacetArrays facetArrays;
-  public FacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, FacetArrays facetArrays) {
+  public FacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, OrdinalValueResolver resolver,
+      FacetArrays facetArrays) {
     this.taxonomyReader = taxonomyReader;
     this.facetRequest = facetRequest;
     this.facetArrays = facetArrays;
+    this.resolver = resolver;
   }
   /** Computes the {@link FacetResult} for the given {@link FacetArrays}. */

FacetsAggregator.java

@@ -46,4 +46,11 @@ public interface FacetsAggregator {
   /** Returns {@code true} if this aggregator requires document scores. */
   public boolean requiresDocScores();
+  /**
+   * Creates the appropriate {@link OrdinalValueResolver} for this aggregator
+   * and the given {@link FacetRequest}. The request is passed so that compound
+   * aggregators can return the correct {@link OrdinalValueResolver}.
+   */
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays);
 }

FloatFacetResultsHandler.java (deleted)

@@ -1,78 +0,0 @@
-package org.apache.lucene.facet.search;
-import java.io.IOException;
-import java.util.ArrayList;
-import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.util.PriorityQueue;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * A {@link DepthOneFacetResultsHandler} which fills the categories values from
- * {@link FacetArrays#getFloatArray()}.
- *
- * @lucene.experimental
- */
-public final class FloatFacetResultsHandler extends DepthOneFacetResultsHandler {
-  private final float[] values;
-  public FloatFacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, FacetArrays facetArrays) {
-    super(taxonomyReader, facetRequest, facetArrays);
-    this.values = facetArrays.getFloatArray();
-  }
-  @Override
-  protected final double valueOf(int ordinal) {
-    return values[ordinal];
-  }
-  @Override
-  protected final int addSiblings(int ordinal, int[] siblings, PriorityQueue<FacetResultNode> pq) {
-    FacetResultNode top = pq.top();
-    int numResults = 0;
-    while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
-      float value = values[ordinal];
-      if (value > 0.0f) {
-        ++numResults;
-        if (value > top.value) {
-          top.value = value;
-          top.ordinal = ordinal;
-          top = pq.updateTop();
-        }
-      }
-      ordinal = siblings[ordinal];
-    }
-    return numResults;
-  }
-  @Override
-  protected final void addSiblings(int ordinal, int[] siblings, ArrayList<FacetResultNode> nodes) throws IOException {
-    while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
-      float value = values[ordinal];
-      if (value > 0) {
-        FacetResultNode node = new FacetResultNode(ordinal, value);
-        node.label = taxonomyReader.getPath(ordinal);
-        nodes.add(node);
-      }
-      ordinal = siblings[ordinal];
-    }
-  }
-}

IntFacetResultsHandler.java (deleted)

@@ -1,78 +0,0 @@
-package org.apache.lucene.facet.search;
-import java.io.IOException;
-import java.util.ArrayList;
-import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.util.PriorityQueue;
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * A {@link DepthOneFacetResultsHandler} which fills the categories values from
- * {@link FacetArrays#getIntArray()}.
- *
- * @lucene.experimental
- */
-public final class IntFacetResultsHandler extends DepthOneFacetResultsHandler {
-  private final int[] values;
-  public IntFacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, FacetArrays facetArrays) {
-    super(taxonomyReader, facetRequest, facetArrays);
-    this.values = facetArrays.getIntArray();
-  }
-  @Override
-  protected final double valueOf(int ordinal) {
-    return values[ordinal];
-  }
-  @Override
-  protected final int addSiblings(int ordinal, int[] siblings, PriorityQueue<FacetResultNode> pq) {
-    FacetResultNode top = pq.top();
-    int numResults = 0;
-    while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
-      int value = values[ordinal];
-      if (value > 0) {
-        ++numResults;
-        if (value > top.value) {
-          top.value = value;
-          top.ordinal = ordinal;
-          top = pq.updateTop();
-        }
-      }
-      ordinal = siblings[ordinal];
-    }
-    return numResults;
-  }
-  @Override
-  protected final void addSiblings(int ordinal, int[] siblings, ArrayList<FacetResultNode> nodes) throws IOException {
-    while (ordinal != TaxonomyReader.INVALID_ORDINAL) {
-      int value = values[ordinal];
-      if (value > 0) {
-        FacetResultNode node = new FacetResultNode(ordinal, value);
-        node.label = taxonomyReader.getPath(ordinal);
-        nodes.add(node);
-      }
-      ordinal = siblings[ordinal];
-    }
-  }
-}

IntRollupFacetsAggregator.java

@@ -4,6 +4,7 @@ import java.io.IOException;
 import org.apache.lucene.facet.params.CategoryListParams;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.search.OrdinalValueResolver.IntValueResolver;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 /*
@@ -60,4 +61,9 @@ public abstract class IntRollupFacetsAggregator implements FacetsAggregator {
     return false;
   }
+  @Override
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays) {
+    return new IntValueResolver(arrays);
+  }
 }

MultiFacetsAggregator.java

@@ -88,4 +88,9 @@ public class MultiFacetsAggregator implements FacetsAggregator {
     return false;
   }
+  @Override
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays) {
+    return categoryAggregators.get(facetRequest.categoryPath).createOrdinalValueResolver(facetRequest, arrays);
+  }
 }

OrdinalValueResolver.java (new file)

@@ -0,0 +1,76 @@
+package org.apache.lucene.facet.search;
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Resolves an ordinal's value given the {@link FacetArrays}.
+ * Implementations of this class are encouraged to initialize the needed array
+ * from {@link FacetArrays} in the constructor.
+ */
+public abstract class OrdinalValueResolver {
+  /**
+   * An {@link OrdinalValueResolver} which resolves an ordinal's value from
+   * {@link FacetArrays#getIntArray()}, by returning the value in the array.
+   */
+  public static final class IntValueResolver extends OrdinalValueResolver {
+    private final int[] values;
+    public IntValueResolver(FacetArrays arrays) {
+      super(arrays);
+      this.values = arrays.getIntArray();
+    }
+    @Override
+    public final double valueOf(int ordinal) {
+      return values[ordinal];
+    }
+  }
+  /**
+   * An {@link OrdinalValueResolver} which resolves an ordinal's value from
+   * {@link FacetArrays#getFloatArray()}, by returning the value in the array.
+   */
+  public static final class FloatValueResolver extends OrdinalValueResolver {
+    private final float[] values;
+    public FloatValueResolver(FacetArrays arrays) {
+      super(arrays);
+      this.values = arrays.getFloatArray();
+    }
+    @Override
+    public final double valueOf(int ordinal) {
+      return values[ordinal];
+    }
+  }
+  protected final FacetArrays arrays;
+  protected OrdinalValueResolver(FacetArrays arrays) {
+    this.arrays = arrays;
+  }
+  /** Returns the value of the given ordinal. */
+  public abstract double valueOf(int ordinal);
+}
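As the class javadoc above suggests, a custom resolver pulls whatever it needs out of the FacetArrays once, in its constructor. A hypothetical sketch of a resolver that combines both arrays (the AvgValueResolver name and its average semantics are illustrative only, not part of the patch):

import org.apache.lucene.facet.search.FacetArrays;
import org.apache.lucene.facet.search.OrdinalValueResolver;

/** Sketch only: a resolver that derives a value from both facet arrays. */
final class AvgValueResolver extends OrdinalValueResolver {
  private final int[] counts;
  private final float[] sums;

  AvgValueResolver(FacetArrays arrays) {
    super(arrays);
    // grab both backing arrays once, per the class javadoc
    this.counts = arrays.getIntArray();
    this.sums = arrays.getFloatArray();
  }

  @Override
  public double valueOf(int ordinal) {
    // average = sum / count; 0 for ordinals that were never aggregated
    return counts[ordinal] == 0 ? 0.0 : sums[ordinal] / (double) counts[ordinal];
  }
}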

PerCategoryListAggregator.java

@@ -61,5 +61,11 @@ public class PerCategoryListAggregator implements FacetsAggregator {
     }
     return false;
   }
+  @Override
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays) {
+    CategoryListParams clp = fip.getCategoryListParams(facetRequest.categoryPath);
+    return aggregators.get(clp).createOrdinalValueResolver(facetRequest, arrays);
+  }
 }

SumScoreFacetRequest.java

@@ -38,14 +38,4 @@ public class SumScoreFacetRequest extends FacetRequest {
     return new SumScoreFacetsAggregator();
   }
-  @Override
-  public double getValueOf(FacetArrays arrays, int ordinal) {
-    return arrays.getFloatArray()[ordinal];
-  }
-  @Override
-  public FacetArraysSource getFacetArraysSource() {
-    return FacetArraysSource.FLOAT;
-  }
 }

SumScoreFacetsAggregator.java

@@ -4,6 +4,7 @@ import java.io.IOException;
 import org.apache.lucene.facet.params.CategoryListParams;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.search.OrdinalValueResolver.FloatValueResolver;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.util.IntsRef;
@@ -76,5 +77,10 @@ public class SumScoreFacetsAggregator implements FacetsAggregator {
   public boolean requiresDocScores() {
     return true;
   }
+  @Override
+  public OrdinalValueResolver createOrdinalValueResolver(FacetRequest facetRequest, FacetArrays arrays) {
+    return new FloatValueResolver(arrays);
+  }
 }

TaxonomyFacetsAccumulator.java

@@ -11,7 +11,6 @@ import java.util.Map.Entry;
 import org.apache.lucene.facet.params.CategoryListParams;
 import org.apache.lucene.facet.params.CategoryListParams.OrdinalPolicy;
 import org.apache.lucene.facet.params.FacetSearchParams;
-import org.apache.lucene.facet.search.FacetRequest.FacetArraysSource;
 import org.apache.lucene.facet.search.FacetRequest.ResultMode;
 import org.apache.lucene.facet.search.FacetRequest.SortOrder;
 import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
@@ -152,24 +151,18 @@ public class TaxonomyFacetsAccumulator extends FacetsAccumulator {
   /**
    * Creates a {@link FacetResultsHandler} that matches the given
-   * {@link FacetRequest}.
+   * {@link FacetRequest}, using the {@link OrdinalValueResolver}.
    */
-  protected FacetResultsHandler createFacetResultsHandler(FacetRequest fr) {
+  protected FacetResultsHandler createFacetResultsHandler(FacetRequest fr, OrdinalValueResolver resolver) {
     if (fr.getDepth() == 1 && fr.getSortOrder() == SortOrder.DESCENDING) {
-      FacetArraysSource fas = fr.getFacetArraysSource();
-      if (fas == FacetArraysSource.INT) {
-        return new IntFacetResultsHandler(taxonomyReader, fr, facetArrays);
-      }
-      if (fas == FacetArraysSource.FLOAT) {
-        return new FloatFacetResultsHandler(taxonomyReader, fr, facetArrays);
-      }
+      return new DepthOneFacetResultsHandler(taxonomyReader, fr, facetArrays, resolver);
     }
     if (fr.getResultMode() == ResultMode.PER_NODE_IN_TREE) {
-      return new TopKInEachNodeHandler(taxonomyReader, fr, facetArrays);
-    }
-    return new TopKFacetResultsHandler(taxonomyReader, fr, facetArrays);
+      return new TopKInEachNodeHandler(taxonomyReader, fr, resolver, facetArrays);
+    } else {
+      return new TopKFacetResultsHandler(taxonomyReader, fr, resolver, facetArrays);
+    }
   }
   /**
@@ -212,7 +205,7 @@ public class TaxonomyFacetsAccumulator extends FacetsAccumulator {
         }
       }
-      FacetResultsHandler frh = createFacetResultsHandler(fr);
+      FacetResultsHandler frh = createFacetResultsHandler(fr, aggregator.createOrdinalValueResolver(fr, facetArrays));
      res.add(frh.compute());
    }
    return res;

TopKFacetResultsHandler.java

@@ -34,16 +34,10 @@ import org.apache.lucene.facet.util.ResultSortUtils;
  */
 public class TopKFacetResultsHandler extends PartitionsFacetResultsHandler {
-  /**
-   * Construct top-K results handler.
-   *
-   * @param taxonomyReader
-   *          taxonomy reader
-   * @param facetRequest
-   *          facet request being served
-   */
-  public TopKFacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, FacetArrays facetArrays) {
-    super(taxonomyReader, facetRequest, facetArrays);
+  /** Construct top-K results handler. */
+  public TopKFacetResultsHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest,
+      OrdinalValueResolver resolver, FacetArrays facetArrays) {
+    super(taxonomyReader, facetRequest, resolver, facetArrays);
   }
   // fetch top K for specific partition.
@@ -56,7 +50,7 @@ public class TopKFacetResultsHandler extends PartitionsFacetResultsHandler {
     double value = 0;
     if (isSelfPartition(ordinal, facetArrays, offset)) {
       int partitionSize = facetArrays.arrayLength;
-      value = facetRequest.getValueOf(facetArrays, ordinal % partitionSize);
+      value = resolver.valueOf(ordinal % partitionSize);
     }
     FacetResultNode parentResultNode = new FacetResultNode(ordinal, value);
@@ -158,7 +152,7 @@ public class TopKFacetResultsHandler extends PartitionsFacetResultsHandler {
       // collect it, if belongs to current partition, and then push its kids on itself, if applicable
       if (tosOrdinal >= offset) { // tosOrdinal resides in current partition
         int relativeOrdinal = tosOrdinal % partitionSize;
-        double value = facetRequest.getValueOf(facetArrays, relativeOrdinal);
+        double value = resolver.valueOf(relativeOrdinal);
         if (value != 0 && !Double.isNaN(value)) {
           // Count current ordinal -- the TOS
           if (reusable == null) {

TopKInEachNodeHandler.java

@@ -62,8 +62,9 @@ import org.apache.lucene.util.PriorityQueue;
  */
 public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
-  public TopKInEachNodeHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, FacetArrays facetArrays) {
-    super(taxonomyReader, facetRequest, facetArrays);
+  public TopKInEachNodeHandler(TaxonomyReader taxonomyReader, FacetRequest facetRequest, OrdinalValueResolver resolver,
+      FacetArrays facetArrays) {
+    super(taxonomyReader, facetRequest, resolver, facetArrays);
   }
   /**
@@ -112,8 +113,8 @@ public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
     // this will grow into the returned IntermediateFacetResult
     IntToObjectMap<AACO> AACOsOfOnePartition = new IntToObjectMap<AACO>();
-    int partitionSize = facetArrays.arrayLength; // all partitions, except, possibly, the last,
-                                                 // have the same length. Hence modulo is OK.
+    // all partitions, except, possibly, the last, have the same length. Hence modulo is OK.
+    int partitionSize = facetArrays.arrayLength;
     int depth = facetRequest.getDepth();
@@ -123,7 +124,7 @@ public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
           facetRequest, AACOsOfOnePartition);
       if (isSelfPartition(rootNode, facetArrays, offset)) {
         tempFRWH.isRootNodeIncluded = true;
-        tempFRWH.rootNodeValue = this.facetRequest.getValueOf(facetArrays, rootNode % partitionSize);
+        tempFRWH.rootNodeValue = resolver.valueOf(rootNode % partitionSize);
       }
       return tempFRWH;
     }
@@ -267,7 +268,7 @@ public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
      while (tosOrdinal >= offset) { // while tosOrdinal belongs to the given partition; here, too, we use the fact
                                     // that TaxonomyReader.INVALID_ORDINAL == -1 < offset
-        double value = facetRequest.getValueOf(facetArrays, tosOrdinal % partitionSize);
+        double value = resolver.valueOf(tosOrdinal % partitionSize);
        if (value != 0) { // the value of yc is not 0, it is to be considered.
          totalNumOfDescendantsConsidered++;
@@ -338,7 +339,7 @@ public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
           facetRequest, AACOsOfOnePartition);
       if (isSelfPartition(rootNode, facetArrays, offset)) {
         tempFRWH.isRootNodeIncluded = true;
-        tempFRWH.rootNodeValue = this.facetRequest.getValueOf(facetArrays, rootNode % partitionSize);
+        tempFRWH.rootNodeValue = resolver.valueOf(rootNode % partitionSize);
       }
       tempFRWH.totalNumOfFacetsConsidered = totalNumOfDescendantsConsidered;
       return tempFRWH;
@@ -374,7 +375,7 @@ public class TopKInEachNodeHandler extends PartitionsFacetResultsHandler {
     int ret = 0;
     if (offset <= ordinal) {
       // ordinal belongs to the current partition
-      if (0 != facetRequest.getValueOf(facetArrays, ordinal % partitionSize)) {
+      if (0 != resolver.valueOf(ordinal % partitionSize)) {
         ret++;
       }
     }

TestTopKInEachNodeResultHandler.java

@@ -184,7 +184,7 @@ public class TestTopKInEachNodeResultHandler extends FacetTestCase {
       }
       // now rearrange
       double [] expectedValues00 = { 6.0, 1.0, 5.0, 3.0, 2.0 };
-      fr = sfa.createFacetResultsHandler(cfra23).rearrangeFacetResult(fr);
+      fr = sfa.createFacetResultsHandler(cfra23, sfa.createOrdinalValueResolver(cfra23)).rearrangeFacetResult(fr);
       i = 0;
       for (FacetResultNode node : parentRes.subResults) {
         assertEquals(expectedValues00[i++], node.value, Double.MIN_VALUE);