SOLR-14482: Fix or suppress warnings in solr/search/facet

Erick Erickson 2020-05-21 08:59:32 -04:00
parent 28209cb8b1
commit 9c066f60f1
63 changed files with 2946 additions and 2707 deletions

View File

@ -240,8 +240,7 @@ Other Changes
* SOLR-14226: Fix or suppress 14 resource leak warnings in apache/solr/core (Andras Salaman via
Erick Erickson)
* SOLR-14485: Fix or suppress 11 resource leak warnings in apache/solr/cloud (Andras Salaman via Erick Erickson)
* SOLR-14482: Fix or suppress warnings in solr/search/facet (Erick Erickson)
================== 8.5.1 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

View File

@ -362,7 +362,7 @@ class ExclusiveSliceProperty {
this.replica = replica;
}
public String toString() {
StringBuilder sb = new StringBuilder(System.lineSeparator()).append(System.lineSeparator()).append("******EOE20 starting toString of SliceReplica");
StringBuilder sb = new StringBuilder(System.lineSeparator()).append(System.lineSeparator());
sb.append(" :").append(System.lineSeparator()).append("slice: ").append(slice.toString()).append(System.lineSeparator()).append(" replica: ").append(replica.toString()).append(System.lineSeparator());
return sb.toString();
}

View File

@ -16,6 +16,8 @@
*/
package org.apache.solr.metrics;
import java.io.IOException;
/**
* Used by objects that expose metrics through {@link SolrMetricManager}.
*/
@ -62,9 +64,14 @@ public interface SolrMetricProducer extends AutoCloseable {
* Implementations should always call <code>SolrMetricProducer.super.close()</code> to ensure that
* metrics with the same life-cycle as this component are properly unregistered. This prevents
* obscure memory leaks.
*
* from: https://docs.oracle.com/javase/8/docs/api/java/lang/AutoCloseable.html
* While this interface method is declared to throw Exception, implementers are strongly encouraged
* to declare concrete implementations of the close method to throw more specific exceptions, or to
* throw no exception at all if the close operation cannot fail.
*/
@Override
default void close() throws Exception {
default void close() throws IOException {
SolrMetricsContext context = getSolrMetricsContext();
if (context == null) {
return;

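The contract above narrows the checked exception on close() from Exception to IOException. Below is a minimal sketch (not part of this commit) of a hypothetical component implementing SolrMetricProducer after this change; the initializeMetrics(SolrMetricsContext, String) and getChildContext(...) signatures are assumed from this branch, and only close()/getSolrMetricsContext() are the focus here.

import java.io.IOException;

import org.apache.solr.metrics.SolrMetricProducer;
import org.apache.solr.metrics.SolrMetricsContext;

public class MyMetricsComponent implements SolrMetricProducer {
  private SolrMetricsContext metricsContext;

  @Override
  public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
    // a child context ties metrics registered here to this component's life-cycle
    this.metricsContext = parentContext.getChildContext(this);
    // register gauges/meters/timers via metricsContext here
  }

  @Override
  public SolrMetricsContext getSolrMetricsContext() {
    return metricsContext;
  }

  @Override
  public void close() throws IOException {
    // always delegate so metrics with this component's life-cycle are unregistered;
    // IOException (or no exception at all) replaces the former broad Exception
    SolrMetricProducer.super.close();
  }
}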
View File

@ -78,7 +78,7 @@ public class JSONResponseWriter implements QueryResponseWriter {
return new JSONWriter(writer, req, rsp);
}
}
/**
* Writes NamedLists directly as an array of NameTypeValue JSON objects...
@ -248,9 +248,10 @@ class ArrayOfNameTypeValueJSONWriter extends JSONWriter {
}
}
abstract class NaNFloatWriter extends JSONWriter {
abstract static class NaNFloatWriter extends JSONWriter {
abstract protected String getNaN();
abstract protected String getInf();
public NaNFloatWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) {
@ -283,3 +284,4 @@ abstract class NaNFloatWriter extends JSONWriter {
}
}
}
}

View File

@ -46,7 +46,7 @@ public class PythonResponseWriter implements QueryResponseWriter {
}
}
class PythonWriter extends NaNFloatWriter {
class PythonWriter extends JSONResponseWriter.NaNFloatWriter {
@Override
protected String getNaN() { return "float('NaN')"; }
@Override

View File

@ -46,7 +46,7 @@ public void write(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) th
}
}
class RubyWriter extends NaNFloatWriter {
class RubyWriter extends JSONResponseWriter.NaNFloatWriter {
@Override
protected String getNaN() { return "(0.0/0.0)"; }

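As the Python and Ruby writers above show, NaNFloatWriter is now a nested class of JSONResponseWriter and subclasses only supply the NaN/Infinity literals. A hypothetical same-package writer (not in Solr) for a dialect with different literals could look like the sketch below, assuming the nested class remains package-visible and keeps the constructor shown above.

package org.apache.solr.response;

import java.io.Writer;

import org.apache.solr.request.SolrQueryRequest;

// hypothetical example, not in Solr: emits JavaScript-style literals by overriding the two hooks
class JavascriptStyleWriter extends JSONResponseWriter.NaNFloatWriter {
  public JavascriptStyleWriter(Writer writer, SolrQueryRequest req, SolrQueryResponse rsp) {
    super(writer, req, rsp);
  }

  @Override
  protected String getNaN() { return "NaN"; }

  @Override
  protected String getInf() { return "Infinity"; }
}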
View File

@ -16,6 +16,7 @@
*/
package org.apache.solr.search;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.time.Duration;
import java.util.Collections;
@ -228,7 +229,7 @@ public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V
}
@Override
public void close() throws Exception {
public void close() throws IOException {
SolrCache.super.close();
cache.invalidateAll();
cache.cleanUp();

View File

@ -33,6 +33,7 @@ import org.slf4j.LoggerFactory;
* instantiate it on demand (and the need to put "searcher" in the map)
* @lucene.experimental
*/
@SuppressWarnings("rawtypes")
public class QueryContext extends IdentityHashMap implements Closeable {
// private IdentityHashMap map; // we are the map for now (for compat w/ ValueSource)
private final SolrIndexSearcher searcher;

View File

@ -18,6 +18,7 @@ package org.apache.solr.search;
import org.apache.solr.core.SolrInfoBean;
import java.io.IOException;
import java.util.Map;
import java.util.function.Function;
@ -150,7 +151,7 @@ public interface SolrCache<K,V> extends SolrInfoBean {
/** Frees any non-memory resources */
default void close() throws Exception {
default void close() throws IOException {
SolrInfoBean.super.close();
}

View File

@ -44,13 +44,14 @@ public abstract class AggValueSource extends ValueSource {
}
@Override
@SuppressWarnings({"rawtypes"})
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
// FUTURE
throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this);
}
// TODO: make abstract
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this);
}

View File

@ -37,7 +37,7 @@ public class AvgAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
@ -62,7 +62,7 @@ public class AvgAgg extends SimpleAggValueSource {
}
vs = sf.getType().getValueSource(sf, null);
}
return new AvgSlotAcc(vs, fcontext, numSlots);
return new SlotAcc.AvgSlotAcc(vs, fcontext, numSlots);
}
@Override
@ -70,12 +70,13 @@ public class AvgAgg extends SimpleAggValueSource {
return new Merger();
}
private static class Merger extends FacetDoubleMerger {
private static class Merger extends FacetModule.FacetDoubleMerger {
long num;
double sum;
@Override
public void merge(Object facetResult, Context mcontext1) {
@SuppressWarnings({"unchecked"})
List<Number> numberList = (List<Number>) facetResult;
num += numberList.get(0).longValue();
sum += numberList.get(1).doubleValue();
@ -88,10 +89,10 @@ public class AvgAgg extends SimpleAggValueSource {
}
}
class AvgSortedNumericAcc extends DoubleSortedNumericDVAcc {
class AvgSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc {
int[] counts;
public AvgSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public AvgSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
}
@ -114,6 +115,7 @@ public class AvgAgg extends SimpleAggValueSource {
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(2);
@ -138,10 +140,10 @@ public class AvgAgg extends SimpleAggValueSource {
}
}
class AvgSortedSetAcc extends DoubleSortedSetDVAcc {
class AvgSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc {
int[] counts;
public AvgSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public AvgSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
}
@ -168,6 +170,7 @@ public class AvgAgg extends SimpleAggValueSource {
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(2);
@ -192,10 +195,10 @@ public class AvgAgg extends SimpleAggValueSource {
}
}
class AvgUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
class AvgUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc {
int[] counts;
public AvgUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public AvgUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
}
@ -224,6 +227,7 @@ public class AvgAgg extends SimpleAggValueSource {
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(2);

View File

@ -24,12 +24,12 @@ public class CountAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
return new CountSlotArrAcc(fcontext, numSlots);
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
return new SlotAcc.CountSlotArrAcc(fcontext, numSlots);
}
@Override
public FacetMerger createFacetMerger(Object prototype) {
return new FacetLongMerger();
return new FacetModule.FacetLongMerger();
}
}

View File

@ -37,7 +37,7 @@ public class CountValsAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
String field = ((FieldNameValueSource)vs).getFieldName();
@ -64,12 +64,12 @@ public class CountValsAgg extends SimpleAggValueSource {
@Override
public FacetMerger createFacetMerger(Object prototype) {
return new FacetLongMerger();
return new FacetModule.FacetLongMerger();
}
class CountValSlotAcc extends LongFuncSlotAcc {
class CountValSlotAcc extends SlotAcc.LongFuncSlotAcc {
public CountValSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
public CountValSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots, 0);
}
@ -81,9 +81,9 @@ public class CountValsAgg extends SimpleAggValueSource {
}
}
class CountSortedNumericDVAcc extends LongSortedNumericDVAcc {
class CountSortedNumericDVAcc extends DocValuesAcc.LongSortedNumericDVAcc {
public CountSortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public CountSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}
@ -93,9 +93,9 @@ public class CountValsAgg extends SimpleAggValueSource {
}
}
class CountSortedSetDVAcc extends LongSortedSetDVAcc {
class CountSortedSetDVAcc extends DocValuesAcc.LongSortedSetDVAcc {
public CountSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public CountSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}
@ -111,7 +111,7 @@ public class CountValsAgg extends SimpleAggValueSource {
private int currentSlot;
long[] result;
public CountMultiValuedAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public CountMultiValuedAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
result = new long[numSlots];
}

View File

@ -39,7 +39,7 @@ import org.apache.solr.schema.SchemaField;
public abstract class DocValuesAcc extends SlotAcc {
SchemaField sf;
public DocValuesAcc(FacetRequest.FacetContext fcontext, SchemaField sf) throws IOException {
public DocValuesAcc(FacetContext fcontext, SchemaField sf) throws IOException {
super(fcontext);
this.sf = sf;
}
@ -58,368 +58,371 @@ public abstract class DocValuesAcc extends SlotAcc {
* returns whether or not given {@code doc} has value
*/
protected abstract boolean advanceExact(int doc) throws IOException;
}
/**
* Accumulator for {@link NumericDocValues}
*/
abstract class NumericDVAcc extends DocValuesAcc {
NumericDocValues values;
public NumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf) throws IOException {
super(fcontext, sf);
}
/**
* Accumulator for {@link NumericDocValues}
*/
abstract class NumericDVAcc extends DocValuesAcc {
NumericDocValues values;
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getNumeric(readerContext.reader(), sf.getName());
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
/**
* Accumulator for {@link SortedNumericDocValues}
*/
abstract class SortedNumericDVAcc extends DocValuesAcc {
SortedNumericDocValues values;
public SortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf);
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getSortedNumeric(readerContext.reader(), sf.getName());
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
abstract class LongSortedNumericDVAcc extends SortedNumericDVAcc {
long[] result;
long initialValue;
public LongSortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
super(fcontext, sf, numSlots);
this.result = new long[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
Arrays.fill(result, initialValue);
public NumericDVAcc(FacetContext fcontext, SchemaField sf) throws IOException {
super(fcontext, sf);
}
}
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
this.result = resizer.resize(result, initialValue);
}
}
abstract class DoubleSortedNumericDVAcc extends SortedNumericDVAcc {
double[] result;
double initialValue;
public DoubleSortedNumericDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
super(fcontext, sf, numSlots);
this.result = new double[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
Arrays.fill(result, initialValue);
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getNumeric(readerContext.reader(), sf.getName());
}
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
this.result = resizer.resize(result, initialValue);
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
/**
* converts given long value to double based on field type
* Accumulator for {@link SortedNumericDocValues}
*/
protected double getDouble(long val) {
switch (sf.getType().getNumberType()) {
case INTEGER:
case LONG:
case DATE:
return val;
case FLOAT:
return NumericUtils.sortableIntToFloat((int) val);
case DOUBLE:
return NumericUtils.sortableLongToDouble(val);
default:
// this would never happen
return 0.0d;
abstract static class SortedNumericDVAcc extends DocValuesAcc {
SortedNumericDocValues values;
public SortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf);
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getSortedNumeric(readerContext.reader(), sf.getName());
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
}
abstract static class LongSortedNumericDVAcc extends SortedNumericDVAcc {
long[] result;
long initialValue;
/**
* Base class for standard deviation and variance computation for fields with {@link SortedNumericDocValues}
*/
abstract class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc {
int[] counts;
double[] sum;
public SDVSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
this.sum = new double[numSlots];
}
@Override
protected void collectValues(int doc, int slot) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; i++) {
double val = getDouble(values.nextValue());
result[slot]+= val * val;
sum[slot]+= val;
counts[slot]++;
public LongSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
super(fcontext, sf, numSlots);
this.result = new long[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
Arrays.fill(result, initialValue);
}
}
}
protected abstract double computeVal(int slot);
@Override
public int compare(int slotA, int slotB) {
return Double.compare(computeVal(slotA), computeVal(slotB));
}
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return computeVal(slot);
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
}
@Override
public void reset() throws IOException {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(counts, 0);
this.sum = resizer.resize(sum, 0);
}
}
/**
* Accumulator for {@link SortedDocValues}
*/
abstract class SortedDVAcc extends DocValuesAcc {
SortedDocValues values;
public SortedDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf) throws IOException {
super(fcontext, sf);
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getSorted(readerContext.reader(), sf.getName());
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
/**
* Accumulator for {@link SortedSetDocValues}
*/
abstract class SortedSetDVAcc extends DocValuesAcc {
SortedSetDocValues values;
public SortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf);
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getSortedSet(readerContext.reader(), sf.getName());
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
abstract class LongSortedSetDVAcc extends SortedSetDVAcc {
long[] result;
long initialValue;
public LongSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
super(fcontext, sf, numSlots);
result = new long[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
@Override
public void resize(Resizer resizer) {
this.result = resizer.resize(result, initialValue);
}
}
}
abstract class DoubleSortedSetDVAcc extends SortedSetDVAcc {
double[] result;
double initialValue;
abstract static class DoubleSortedNumericDVAcc extends SortedNumericDVAcc {
double[] result;
double initialValue;
public DoubleSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
super(fcontext, sf, numSlots);
result = new double[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
public DoubleSortedNumericDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
super(fcontext, sf, numSlots);
this.result = new double[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
Arrays.fill(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
@Override
public void resize(Resizer resizer) {
this.result = resizer.resize(result, initialValue);
}
}
/**
* Base class for standard deviation and variance computation for fields with {@link SortedSetDocValues}
*/
abstract class SDVSortedSetAcc extends DoubleSortedSetDVAcc {
int[] counts;
double[] sum;
public SDVSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
this.sum = new double[numSlots];
}
@Override
protected void collectValues(int doc, int slot) throws IOException {
long ord;
while ((ord = values.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
BytesRef term = values.lookupOrd(ord);
Object obj = sf.getType().toObject(sf, term);
double val = obj instanceof Date ? ((Date)obj).getTime(): ((Number)obj).doubleValue();
result[slot] += val * val;
sum[slot] += val;
counts[slot]++;
}
}
protected abstract double computeVal(int slot);
@Override
public int compare(int slotA, int slotB) {
return Double.compare(computeVal(slotA), computeVal(slotB));
}
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return computeVal(slot);
/**
* converts given long value to double based on field type
*/
protected double getDouble(long val) {
switch (sf.getType().getNumberType()) {
case INTEGER:
case LONG:
case DATE:
return val;
case FLOAT:
return NumericUtils.sortableIntToFloat((int) val);
case DOUBLE:
return NumericUtils.sortableLongToDouble(val);
default:
// this would never happen
return 0.0d;
}
}
}
@Override
public void reset() throws IOException {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
/**
* Base class for standard deviation and variance computation for fields with {@link SortedNumericDocValues}
*/
abstract static class SDVSortedNumericAcc extends DoubleSortedNumericDVAcc {
int[] counts;
double[] sum;
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
public SDVSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
this.sum = new double[numSlots];
}
@Override
protected void collectValues(int doc, int slot) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; i++) {
double val = getDouble(values.nextValue());
result[slot] += val * val;
sum[slot] += val;
counts[slot]++;
}
}
protected abstract double computeVal(int slot);
@Override
public int compare(int slotA, int slotB) {
return Double.compare(computeVal(slotA), computeVal(slotB));
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return computeVal(slot);
}
}
@Override
public void reset() throws IOException {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(counts, 0);
this.sum = resizer.resize(sum, 0);
}
}
/**
* Accumulator for {@link SortedDocValues}
*/
abstract class SortedDVAcc extends DocValuesAcc {
SortedDocValues values;
public SortedDVAcc(FacetContext fcontext, SchemaField sf) throws IOException {
super(fcontext, sf);
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getSorted(readerContext.reader(), sf.getName());
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
/**
* Accumulator for {@link SortedSetDocValues}
*/
abstract static class SortedSetDVAcc extends DocValuesAcc {
SortedSetDocValues values;
public SortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf);
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = DocValues.getSortedSet(readerContext.reader(), sf.getName());
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
abstract static class LongSortedSetDVAcc extends SortedSetDVAcc {
long[] result;
long initialValue;
public LongSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
super(fcontext, sf, numSlots);
result = new long[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
Arrays.fill(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
this.result = resizer.resize(result, initialValue);
}
}
abstract static class DoubleSortedSetDVAcc extends SortedSetDVAcc {
double[] result;
double initialValue;
public DoubleSortedSetDVAcc(FacetContext fcontext, SchemaField sf, int numSlots, long initialValue) throws IOException {
super(fcontext, sf, numSlots);
result = new double[numSlots];
this.initialValue = initialValue;
if (initialValue != 0) {
Arrays.fill(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
this.result = resizer.resize(result, initialValue);
}
}
/**
* Base class for standard deviation and variance computation for fields with {@link SortedSetDocValues}
*/
abstract static class SDVSortedSetAcc extends DoubleSortedSetDVAcc {
int[] counts;
double[] sum;
public SDVSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
this.sum = new double[numSlots];
}
@Override
protected void collectValues(int doc, int slot) throws IOException {
long ord;
while ((ord = values.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
BytesRef term = values.lookupOrd(ord);
Object obj = sf.getType().toObject(sf, term);
double val = obj instanceof Date ? ((Date) obj).getTime() : ((Number) obj).doubleValue();
result[slot] += val * val;
sum[slot] += val;
counts[slot]++;
}
}
protected abstract double computeVal(int slot);
@Override
public int compare(int slotA, int slotB) {
return Double.compare(computeVal(slotA), computeVal(slotB));
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return computeVal(slot);
}
}
@Override
public void reset() throws IOException {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(counts, 0);
this.sum = resizer.resize(sum, 0);
}
}
}
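The getDouble(long) helper above decodes SortedNumericDocValues entries: float and double fields store their values as sortable int/long bits, while integer, long, and date fields are used as-is. A standalone sketch (not from this commit) of that round trip using Lucene's NumericUtils:

import org.apache.lucene.util.NumericUtils;

public class SortableBitsDemo {
  public static void main(String[] args) {
    // doubles are stored in doc values as sortable long bits
    double original = 3.25;
    long sortableBits = NumericUtils.doubleToSortableLong(original);
    System.out.println(NumericUtils.sortableLongToDouble(sortableBits)); // 3.25

    // floats are stored as sortable int bits, widened to long by SortedNumericDocValues,
    // which is why the accumulator casts back to int before decoding
    float f = 1.5f;
    long widened = NumericUtils.floatToSortableInt(f); // implicit int -> long widening
    System.out.println(NumericUtils.sortableIntToFloat((int) widened)); // 1.5
  }
}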

View File

@ -24,14 +24,17 @@ import java.util.Map;
import org.apache.solr.common.util.SimpleOrderedMap;
public class FacetBucket {
final FacetBucketMerger parent;
@SuppressWarnings("rawtypes")
final FacetModule.FacetBucketMerger parent;
@SuppressWarnings({"rawtypes"})
final Comparable bucketValue;
final int bucketNumber; // this is just for internal correlation (the first bucket created is bucket 0, the next bucket 1, across all field buckets)
long count;
Map<String, FacetMerger> subs;
public FacetBucket(FacetBucketMerger parent, Comparable bucketValue, FacetMerger.Context mcontext) {
public FacetBucket(@SuppressWarnings("rawtypes") FacetModule.FacetBucketMerger parent
, @SuppressWarnings("rawtypes") Comparable bucketValue, FacetMerger.Context mcontext) {
this.parent = parent;
this.bucketValue = bucketValue;
this.bucketNumber = mcontext.getNewBucketNumber(); // TODO: we don't need bucket numbers for all buckets...
@ -66,7 +69,7 @@ public class FacetBucket {
return merger;
}
public void mergeBucket(SimpleOrderedMap bucket, FacetMerger.Context mcontext) {
public void mergeBucket(@SuppressWarnings("rawtypes") SimpleOrderedMap bucket, FacetMerger.Context mcontext) {
// todo: for refinements, we want to recurse, but not re-do stats for intermediate buckets
mcontext.setShardFlag(bucketNumber);
@ -93,6 +96,7 @@ public class FacetBucket {
}
@SuppressWarnings({"rawtypes", "unchecked"})
public SimpleOrderedMap getMergedBucket() {
SimpleOrderedMap out = new SimpleOrderedMap( (subs == null ? 0 : subs.size()) + 2 );
if (bucketValue != null) {

View File

@ -0,0 +1,74 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search.facet;
import java.util.Map;
import org.apache.lucene.search.Query;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QueryContext;
import org.apache.solr.search.SolrIndexSearcher;
public class FacetContext {
// Context info for actually executing a local facet command
public static final int IS_SHARD=0x01;
public static final int IS_REFINEMENT=0x02;
public static final int SKIP_FACET=0x04; // refinement: skip calculating this immediate facet, but proceed to specific sub-facets based on facetInfo
FacetProcessor<?> processor;
Map<String,Object> facetInfo; // refinement info for this node
QueryContext qcontext;
SolrQueryRequest req; // TODO: replace with params?
SolrIndexSearcher searcher;
Query filter; // TODO: keep track of as a DocSet or as a Query?
DocSet base;
FacetContext parent;
int flags;
FacetDebugInfo debugInfo;
public void setDebugInfo(FacetDebugInfo debugInfo) {
this.debugInfo = debugInfo;
}
public FacetDebugInfo getDebugInfo() {
return debugInfo;
}
public boolean isShard() {
return (flags & IS_SHARD) != 0;
}
/**
* @param filter The filter for the bucket that resulted in this context/domain. Can be null if this is the root context.
* @param domain The resulting set of documents for this facet.
*/
public FacetContext sub(Query filter, DocSet domain) {
FacetContext ctx = new FacetContext();
ctx.parent = this;
ctx.base = domain;
ctx.filter = filter;
// carry over from parent
ctx.flags = flags;
ctx.qcontext = qcontext;
ctx.req = req;
ctx.searcher = searcher;
return ctx;
}
}
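A sketch (not part of this commit) of how a same-package processor might use sub(...) to derive the context for one bucket; the bucket filter, field/value, and the SolrIndexSearcher.getDocSet(Query, DocSet) call are assumptions for illustration.

package org.apache.solr.search.facet;

import java.io.IOException;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.solr.search.DocSet;

// hypothetical helper, not in Solr
class FacetContextExample {
  static FacetContext bucketContext(FacetContext parent, String field, String value) throws IOException {
    Query bucketFilter = new TermQuery(new Term(field, value));                 // filter that produced the bucket
    DocSet bucketDomain = parent.searcher.getDocSet(bucketFilter, parent.base); // restrict the parent domain
    return parent.sub(bucketFilter, bucketDomain);                              // flags, qcontext, req, searcher carry over
  }
}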

View File

@ -65,7 +65,7 @@ public class FacetDebugInfo {
return info;
}
public SimpleOrderedMap getFacetDebugInfo() {
public SimpleOrderedMap<Object> getFacetDebugInfo() {
SimpleOrderedMap<Object> info = new SimpleOrderedMap<>();
if (filter != null) info.add("filter", filter);

View File

@ -24,50 +24,6 @@ import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.NumberType;
import org.apache.solr.schema.SchemaField;
// Any type of facet request that generates a variable number of buckets
// and the ability to sort by those generated buckets.
abstract class FacetRequestSorted extends FacetRequest {
long offset;
long limit;
/**
* Number of buckets to request beyond the limit to do internally during initial distributed search.
* -1 means default heuristic.
*/
int overrequest = -1;
/**
* Number of buckets to fill in beyond the limit to do internally during refinement of distributed search.
* -1 means default heuristic.
*/
int overrefine = -1;
long mincount;
/**
* The basic sorting to do on buckets, defaults to {@link FacetRequest.FacetSort#COUNT_DESC}
* @see #prelim_sort
*/
FacetSort sort;
/**
* An optional "Pre-Sort" that defaults to null.
* If specified, then the <code>prelim_sort</code> is used as an optimization in place of {@link #sort}
* during collection, and the full {@link #sort} values are only computed for the top candidate buckets
* (after refinement)
*/
FacetSort prelim_sort;
RefineMethod refine; // null, NONE, or SIMPLE
@Override
public RefineMethod getRefineMethod() {
return refine;
}
@Override
public boolean returnsPartial() {
return super.returnsPartial() || (limit > 0);
}
}
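The removed block above (relocated to its own FacetRequestSorted source file by this commit) documents the distributed-faceting knobs. A hypothetical json.facet payload exercising them is sketched below; the field names and Solr's lenient JSON syntax (unquoted keys, single quotes) are assumptions.

public class JsonFacetExample {
  public static void main(String[] args) {
    // overrequest fetches extra buckets per shard in the first phase; prelim_sort is
    // the cheap collection-time sort; the full sort runs only on the surviving buckets
    String jsonFacet =
        "{ top_cats: { type: terms, field: cat, limit: 10, overrequest: 20, "
            + "prelim_sort: 'count desc', sort: 'x desc', facet: { x: 'avg(price)' } } }";
    System.out.println(jsonFacet); // send as the json.facet request parameter
  }
}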
public class FacetField extends FacetRequestSorted {
public static final int DEFAULT_FACET_LIMIT = 10;
String field;
@ -114,6 +70,7 @@ public class FacetField extends FacetRequestSorted {
}
@Override
@SuppressWarnings("rawtypes")
public FacetProcessor createFacetProcessor(FacetContext fcontext) {
SchemaField sf = fcontext.searcher.getSchema().getField(field);
FieldType ft = sf.getType();

View File

@ -45,6 +45,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void merge(Object facetResult, Context mcontext) {
super.merge(facetResult, mcontext);
if (numReturnedPerShard == null) {
@ -53,7 +54,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
merge((SimpleOrderedMap)facetResult, mcontext);
}
protected void merge(SimpleOrderedMap facetResult, Context mcontext) {
protected void merge(@SuppressWarnings("rawtypes") SimpleOrderedMap facetResult, Context mcontext) {
if (freq.missing) {
Object o = facetResult.get("missing");
if (o != null) {
@ -74,6 +75,8 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
List<SimpleOrderedMap> bucketList = (List<SimpleOrderedMap>) facetResult.get("buckets");
numReturnedPerShard[mcontext.shardNum] = bucketList.size();
numReturnedBuckets += bucketList.size();
@ -95,6 +98,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getMergedResult() {
SimpleOrderedMap result = new SimpleOrderedMap();
@ -199,6 +203,7 @@ public class FacetFieldMerger extends FacetRequestSortedMerger<FacetField> {
Set<Object> values;
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void merge(Object facetResult, Context mcontext) {
SimpleOrderedMap map = (SimpleOrderedMap)facetResult;
long numBuckets = ((Number)map.get("numBuckets")).longValue();

View File

@ -41,7 +41,7 @@ import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.facet.SlotAcc.SlotContext;
import static org.apache.solr.search.facet.FacetRequest.FacetContext.SKIP_FACET;
import static org.apache.solr.search.facet.FacetContext.SKIP_FACET;
/**
* Facet processing based on field values. (not range nor by query)
@ -69,7 +69,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
SpecialSlotAcc allBucketsAcc; // this can internally refer to otherAccs and/or collectAcc. setNextReader should be called on otherAccs directly if they exist.
FacetFieldProcessor(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
FacetFieldProcessor(FacetContext fcontext, FacetField freq, SchemaField sf) {
super(fcontext, freq);
this.sf = sf;
this.effectiveMincount = (int)(fcontext.isShard() ? Math.min(1 , freq.mincount) : freq.mincount);
@ -115,7 +115,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
// allow a custom count acc to be used
if (countAcc == null) {
countAcc = new CountSlotArrAcc(fcontext, slotCount);
countAcc = new SlotAcc.CountSlotArrAcc(fcontext, slotCount);
countAcc.key = "count";
}
@ -162,7 +162,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
if (indexOrderAcc == null) {
// This sorting accumulator just goes by the slot number, so does not need to be collected
// and hence does not need to find its way into the accMap or accs array.
indexOrderAcc = new SortSlotAcc(fcontext);
indexOrderAcc = new SlotAcc.SortSlotAcc(fcontext);
}
return indexOrderAcc;
}
@ -178,7 +178,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
// we always count...
// allow a subclass to set a custom counter.
if (countAcc == null) {
countAcc = new CountSlotArrAcc(fcontext, numSlots);
countAcc = new SlotAcc.CountSlotArrAcc(fcontext, numSlots);
}
sortAcc = getTrivialSortingSlotAcc(this.sort);
@ -292,8 +292,8 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
/** Processes the collected data to find the top slots, and composes it in the response NamedList. */
SimpleOrderedMap<Object> findTopSlots(final int numSlots, final int slotCardinality,
IntFunction<Comparable> bucketValFromSlotNumFunc,
Function<Comparable, String> fieldQueryValFunc) throws IOException {
@SuppressWarnings("rawtypes") IntFunction<Comparable> bucketValFromSlotNumFunc,
@SuppressWarnings("rawtypes") Function<Comparable, String> fieldQueryValFunc) throws IOException {
assert this.sortAcc != null;
long numBuckets = 0;
@ -437,6 +437,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
sortedSlots = Arrays.copyOfRange(sortedSlots, off, endOffset);
}
}
@SuppressWarnings({"rawtypes"})
List<SimpleOrderedMap> bucketList = new ArrayList<>(sortedSlots.length);
for (Slot slot : sortedSlots) {
@ -492,6 +493,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
int slot;
/** filled in only once we know the bucket will either be involved in resorting, or returned */
@SuppressWarnings({"rawtypes"})
Comparable bucketVal;
/** Filled in if and only if needed for resorting, deferred stats, or subfacets */
@ -690,7 +692,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
static class MultiAcc extends SlotAcc {
final SlotAcc[] subAccs;
MultiAcc(FacetRequest.FacetContext fcontext, SlotAcc[] subAccs) {
MultiAcc(FacetContext fcontext, SlotAcc[] subAccs) {
super(fcontext);
this.subAccs = subAccs;
}
@ -749,7 +751,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
int otherAccsSlot;
long count;
SpecialSlotAcc(FacetRequest.FacetContext fcontext, SlotAcc collectAcc, int collectAccSlot, SlotAcc[] otherAccs, int otherAccsSlot) {
SpecialSlotAcc(FacetContext fcontext, SlotAcc collectAcc, int collectAccSlot, SlotAcc[] otherAccs, int otherAccsSlot) {
super(fcontext);
this.collectAcc = collectAcc;
this.collectAccSlot = collectAccSlot;
@ -839,10 +841,12 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
"cat1":{"_l":["A"]}}}
*/
@SuppressWarnings({"unchecked"})
static <T> List<T> asList(Object list) {
return list != null ? (List<T>)list : Collections.EMPTY_LIST;
}
@SuppressWarnings({"rawtypes", "unchecked"})
protected SimpleOrderedMap<Object> refineFacets() throws IOException {
boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0;
@ -874,6 +878,7 @@ abstract class FacetFieldProcessor extends FacetProcessor<FacetField> {
}
// The only difference between skip and missing is the value of "skip" passed to refineBucket
for (List bucketAndFacetInfo : partial) {
assert bucketAndFacetInfo.size() == 2;
Object bucketVal = bucketAndFacetInfo.get(0);

View File

@ -28,7 +28,7 @@ import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.facet.SlotAcc.SlotContext;
import static org.apache.solr.search.facet.FacetRequest.FacetContext.SKIP_FACET;
import static org.apache.solr.search.facet.FacetContext.SKIP_FACET;
/**
* Base class for DV/UIF accumulating counts into an array by ordinal. It's
@ -45,7 +45,7 @@ abstract class FacetFieldProcessorByArray extends FacetFieldProcessor {
int allBucketsSlot = -1; // slot for the primary Accs (countAcc, collectAcc)
FacetFieldProcessorByArray(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
FacetFieldProcessorByArray(FacetContext fcontext, FacetField freq, SchemaField sf) {
super(fcontext, freq, sf);
}

View File

@ -46,7 +46,7 @@ class FacetFieldProcessorByArrayDV extends FacetFieldProcessorByArray {
SortedSetDocValues si; // only used for term lookups (for both single and multi-valued)
OrdinalMap ordinalMap = null; // maps per-segment ords to global ords
FacetFieldProcessorByArrayDV(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
FacetFieldProcessorByArrayDV(FacetContext fcontext, FacetField freq, SchemaField sf) {
super(fcontext, freq, sf);
multiValuedField = sf.multiValued() || sf.getType().multiValuedFieldCache();
}

View File

@ -30,7 +30,7 @@ class FacetFieldProcessorByArrayUIF extends FacetFieldProcessorByArray {
UnInvertedField uif;
TermsEnum te;
FacetFieldProcessorByArrayUIF(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
FacetFieldProcessorByArrayUIF(FacetContext fcontext, FacetField freq, SchemaField sf) {
super(fcontext, freq, sf);
if (! sf.isUninvertible()) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,

View File

@ -72,7 +72,7 @@ class FacetFieldProcessorByEnumTermsStream extends FacetFieldProcessor implement
LeafReaderContext[] leaves;
FacetFieldProcessorByEnumTermsStream(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
FacetFieldProcessorByEnumTermsStream(FacetContext fcontext, FacetField freq, SchemaField sf) {
super(fcontext, freq, sf);
}
@ -85,6 +85,7 @@ class FacetFieldProcessorByEnumTermsStream extends FacetFieldProcessor implement
}
@Override
@SuppressWarnings({"rawtypes"})
public void process() throws IOException {
super.process();

View File

@ -162,6 +162,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
/** To be returned in "buckets"/"val" */
@Override
@SuppressWarnings({"rawtypes"})
public Comparable bitsToValue(long globalOrd) {
BytesRef bytesRef = lookupOrdFunction.apply((int) globalOrd);
// note FacetFieldProcessorByArray.findTopSlots also calls SchemaFieldType.toObject
@ -169,16 +170,19 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
}
@Override
@SuppressWarnings({"rawtypes"})
public String formatValue(Comparable val) {
return (String) val;
}
@Override
@SuppressWarnings({"rawtypes"})
protected Comparable parseStr(String rawval) throws ParseException {
throw new UnsupportedOperationException();
}
@Override
@SuppressWarnings({"rawtypes"})
protected Comparable parseAndAddGap(Comparable value, String gap) throws ParseException {
throw new UnsupportedOperationException();
}
@ -189,7 +193,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
LongCounts table;
int allBucketsSlot = -1;
FacetFieldProcessorByHashDV(FacetRequest.FacetContext fcontext, FacetField freq, SchemaField sf) {
FacetFieldProcessorByHashDV(FacetContext fcontext, FacetField freq, SchemaField sf) {
super(fcontext, freq, sf);
if (freq.mincount == 0) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
@ -285,7 +289,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
}
};
countAcc = new CountSlotAcc(fcontext) {
countAcc = new SlotAcc.CountSlotAcc(fcontext) {
@Override
public void incrementCount(int slot, long count) {
throw new UnsupportedOperationException();
@ -437,6 +441,7 @@ class FacetFieldProcessorByHashDV extends FacetFieldProcessor {
*/
private IntFunction<SlotContext> slotContext = (slotNum) -> {
long val = table.vals[slotNum];
@SuppressWarnings({"rawtypes"})
Comparable value = calc.bitsToValue(val);
return new SlotContext(sf.getType().getFieldQuery(null, sf, calc.formatValue(value)));
};

View File

@ -94,6 +94,7 @@ public class FacetHeatmap extends FacetRequest {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
static class Parser extends FacetParser<FacetHeatmap> {
@SuppressWarnings({"rawtypes"})
Parser(FacetParser parent, String key) {
super(parent, key);
}
@ -117,6 +118,7 @@ public class FacetHeatmap extends FacetRequest {
final DistanceUnits distanceUnits;
// note: the two instanceof conditions is not ideal, versus one. If we start needing to add more then refactor.
if ((type instanceof AbstractSpatialPrefixTreeFieldType)) {
@SuppressWarnings({"rawtypes"})
AbstractSpatialPrefixTreeFieldType rptType = (AbstractSpatialPrefixTreeFieldType) type;
strategy = (PrefixTreeStrategy) rptType.getStrategy(fieldName);
distanceUnits = rptType.getDistanceUnits();
@ -204,17 +206,21 @@ public class FacetHeatmap extends FacetRequest {
}
@Override
@SuppressWarnings({"rawtypes"})
public FacetProcessor createFacetProcessor(FacetContext fcontext) {
return new FacetHeatmapProcessor(fcontext);
}
// don't use an anonymous class since the getSimpleName() isn't friendly in debug output
@SuppressWarnings({"rawtypes"})
private class FacetHeatmapProcessor extends FacetProcessor {
@SuppressWarnings({"unchecked"})
public FacetHeatmapProcessor(FacetContext fcontext) {
super(fcontext, FacetHeatmap.this);
}
@Override
@SuppressWarnings({"unchecked"})
public void process() throws IOException {
super.process(); // handles domain changes
@ -233,7 +239,7 @@ public class FacetHeatmap extends FacetRequest {
}
//Populate response
response = new SimpleOrderedMap();
response = new SimpleOrderedMap<>();
response.add("gridLevel", gridLevel);
response.add("columns", heatmap.columns);
response.add("rows", heatmap.rows);

View File

@ -51,8 +51,8 @@ public class FacetModule extends SearchComponent {
// The largest current flag in ShardRequest is 0x00002000
// We'll put our bits in the middle to avoid future ones in ShardRequest and
// custom ones that may start at the top.
public final static int PURPOSE_GET_JSON_FACETS = 0x00100000;
public final static int PURPOSE_REFINE_JSON_FACETS = 0x00200000;
public final static int PURPOSE_GET_JSON_FACETS = 0x00100000;
public final static int PURPOSE_REFINE_JSON_FACETS = 0x00200000;
// Internal information passed down from the top level to shards for distributed faceting.
private final static String FACET_INFO = "_facet_";
@ -67,11 +67,12 @@ public class FacetModule extends SearchComponent {
@Override
@SuppressWarnings({"unchecked"})
public void prepare(ResponseBuilder rb) throws IOException {
Map<String,Object> json = rb.req.getJSON();
Map<String,Object> jsonFacet = null;
Map<String, Object> json = rb.req.getJSON();
Map<String, Object> jsonFacet = null;
if (json == null) {
int version = rb.req.getParams().getInt("facet.version",1);
int version = rb.req.getParams().getInt("facet.version", 1);
if (version <= 1) return;
boolean facetsEnabled = rb.req.getParams().getBool(FacetParams.FACET, false);
if (!facetsEnabled) return;
@ -90,14 +91,15 @@ public class FacetModule extends SearchComponent {
SolrParams params = rb.req.getParams();
boolean isShard = params.getBool(ShardParams.IS_SHARD, false);
Map<String,Object> facetInfo = null;
@SuppressWarnings({"unchecked"})
Map<String, Object> facetInfo = null;
if (isShard) {
String jfacet = params.get(FACET_INFO);
if (jfacet == null) {
// if this is a shard request, but there is no _facet_ info, then don't do anything.
return;
}
facetInfo = (Map<String,Object>) fromJSONString(jfacet);
facetInfo = (Map<String, Object>) fromJSONString(jfacet);
}
// At this point, we know we need to do something. Create and save the state.
@ -118,6 +120,7 @@ public class FacetModule extends SearchComponent {
@Override
@SuppressWarnings({"unchecked"})
public void process(ResponseBuilder rb) throws IOException {
// if this is null, faceting is not enabled
FacetComponentState facetState = getFacetComponentState(rb);
@ -125,17 +128,17 @@ public class FacetModule extends SearchComponent {
boolean isShard = rb.req.getParams().getBool(ShardParams.IS_SHARD, false);
FacetRequest.FacetContext fcontext = new FacetRequest.FacetContext();
FacetContext fcontext = new FacetContext();
fcontext.base = rb.getResults().docSet;
fcontext.req = rb.req;
fcontext.searcher = rb.req.getSearcher();
fcontext.qcontext = QueryContext.newContext(fcontext.searcher);
if (isShard) {
fcontext.flags |= FacetRequest.FacetContext.IS_SHARD;
fcontext.facetInfo = facetState.facetInfo.isEmpty() ? null : (Map<String,Object>)facetState.facetInfo.get(FACET_REFINE);
fcontext.flags |= FacetContext.IS_SHARD;
fcontext.facetInfo = facetState.facetInfo.isEmpty() ? null : (Map<String, Object>) facetState.facetInfo.get(FACET_REFINE);
if (fcontext.facetInfo != null) {
fcontext.flags |= FacetRequest.FacetContext.IS_REFINEMENT;
fcontext.flags |= FacetRequest.FacetContext.SKIP_FACET; // the root bucket should have been received from all shards previously
fcontext.flags |= FacetContext.IS_REFINEMENT;
fcontext.flags |= FacetContext.SKIP_FACET; // the root bucket should have been received from all shards previously
}
}
if (rb.isDebug()) {
@ -170,7 +173,7 @@ public class FacetModule extends SearchComponent {
}
// Check if there are any refinements possible
if ((facetState.mcontext==null) ||facetState.mcontext.getSubsWithRefinement(facetState.facetRequest).isEmpty()) {
if ((facetState.mcontext == null) || facetState.mcontext.getSubsWithRefinement(facetState.facetRequest).isEmpty()) {
clearFaceting(rb.outgoing);
return ResponseBuilder.STAGE_DONE;
}
@ -187,7 +190,7 @@ public class FacetModule extends SearchComponent {
facetState.mcontext.setShard(shard);
// shard-specific refinement
Map<String,Object> refinement = facetState.merger.getRefinement(facetState.mcontext);
Map<String, Object> refinement = facetState.merger.getRefinement(facetState.mcontext);
if (refinement == null) continue;
boolean newRequest = false;
@ -197,11 +200,10 @@ public class FacetModule extends SearchComponent {
// If nshards becomes too great, we may want to move to hashing for
// better scalability.
for (ShardRequest sreq : rb.outgoing) {
if ( (sreq.purpose & (ShardRequest.PURPOSE_GET_FIELDS|ShardRequest.PURPOSE_REFINE_FACETS|ShardRequest.PURPOSE_REFINE_PIVOT_FACETS)) != 0
if ((sreq.purpose & (ShardRequest.PURPOSE_GET_FIELDS | ShardRequest.PURPOSE_REFINE_FACETS | ShardRequest.PURPOSE_REFINE_PIVOT_FACETS)) != 0
&& sreq.shards != null
&& sreq.shards.length == 1
&& sreq.shards[0].equals(shard))
{
&& sreq.shards[0].equals(shard)) {
shardsRefineRequest = sreq;
break;
}
@ -212,7 +214,7 @@ public class FacetModule extends SearchComponent {
// so create one ourselves.
newRequest = true;
shardsRefineRequest = new ShardRequest();
shardsRefineRequest.shards = new String[] { shard };
shardsRefineRequest.shards = new String[]{shard};
shardsRefineRequest.params = new ModifiableSolrParams(rb.req.getParams());
// don't request any documents
shardsRefineRequest.params.remove(CommonParams.START);
@ -222,7 +224,7 @@ public class FacetModule extends SearchComponent {
shardsRefineRequest.purpose |= PURPOSE_REFINE_JSON_FACETS;
Map<String,Object> finfo = new HashMap<>(1);
Map<String, Object> finfo = new HashMap<>(1);
finfo.put(FACET_REFINE, refinement);
// String finfoStr = JSONUtil.toJSON(finfo, -1); // this doesn't handle formatting of Date objects the way we want
@ -232,7 +234,7 @@ public class FacetModule extends SearchComponent {
public void handleUnknownClass(Object o) {
// handle date formatting correctly
if (o instanceof Date) {
String s = ((Date)o).toInstant().toString();
String s = ((Date) o).toInstant().toString();
writeString(s);
return;
}
@ -254,7 +256,7 @@ public class FacetModule extends SearchComponent {
}
@Override
public void modifyRequest(ResponseBuilder rb, SearchComponent who,ShardRequest sreq) {
public void modifyRequest(ResponseBuilder rb, SearchComponent who, ShardRequest sreq) {
FacetComponentState facetState = getFacetComponentState(rb);
if (facetState == null) return;
@ -264,8 +266,8 @@ public class FacetModule extends SearchComponent {
} else {
// turn off faceting on other requests
/*** distributedProcess will need to use other requests for refinement
sreq.params.remove("json.facet"); // this just saves space... the presence of FACET_INFO really control the faceting
sreq.params.remove(FACET_INFO);
sreq.params.remove("json.facet"); // this just saves space... the presence of FACET_INFO really control the faceting
sreq.params.remove(FACET_INFO);
**/
}
}
@ -281,15 +283,15 @@ public class FacetModule extends SearchComponent {
if (top == null) continue; // shards.tolerant=true will cause this to happen on exceptions/errors
Object facet = top.get("facets");
if (facet == null) {
SimpleOrderedMap shardResponseHeader = (SimpleOrderedMap)rsp.getResponse().get("responseHeader");
if(Boolean.TRUE.equals(shardResponseHeader.getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) {
@SuppressWarnings("rawtypes") SimpleOrderedMap shardResponseHeader = (SimpleOrderedMap) rsp.getResponse().get("responseHeader");
if (Boolean.TRUE.equals(shardResponseHeader.getBooleanArg(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY))) {
rb.rsp.getResponseHeader().asShallowMap().put(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE);
}
continue;
}
if (facetState.merger == null) {
facetState.merger = facetState.facetRequest.createFacetMerger(facet);
facetState.mcontext = new FacetMerger.Context( sreq.responses.size() );
facetState.mcontext = new FacetMerger.Context(sreq.responses.size());
}
if ((sreq.purpose & PURPOSE_REFINE_JSON_FACETS) != 0) {
@ -297,14 +299,14 @@ public class FacetModule extends SearchComponent {
// call merge again with a diff flag set on the context???
facetState.mcontext.root = facet;
facetState.mcontext.setShard(shardRsp.getShard()); // TODO: roll newShard into setShard?
facetState.merger.merge(facet , facetState.mcontext);
facetState.merger.merge(facet, facetState.mcontext);
return;
}
// System.err.println("MERGING FACET RESULT FROM SHARD = " + facet);
facetState.mcontext.root = facet;
facetState.mcontext.newShard(shardRsp.getShard());
facetState.merger.merge(facet , facetState.mcontext);
facetState.merger.merge(facet, facetState.mcontext);
}
}
@ -330,182 +332,181 @@ public class FacetModule extends SearchComponent {
public Category getCategory() {
return Category.QUERY;
}
}
// TODO: perhaps factor out some sort of root/parent facet object that doesn't depend
// TODO: perhaps factor out some sort of root/parent facet object that doesn't depend
// on stuff like ResponseBuilder, but contains request parameters,
// root filter lists (for filter exclusions), etc?
class FacetComponentState {
ResponseBuilder rb;
Map<String,Object> facetCommands;
FacetRequest facetRequest;
boolean isShard;
Map<String,Object> facetInfo; // _facet_ param: contains out-of-band facet info, mainly for refinement requests
class FacetComponentState {
ResponseBuilder rb;
Map<String, Object> facetCommands;
FacetRequest facetRequest;
boolean isShard;
Map<String, Object> facetInfo; // _facet_ param: contains out-of-band facet info, mainly for refinement requests
//
// Only used for distributed search
//
FacetMerger merger;
FacetMerger.Context mcontext;
}
// base class for facet functions that can be used in a sort
abstract class FacetSortableMerger extends FacetMerger {
public void prepareSort() {
//
// Only used for distributed search
//
FacetMerger merger;
FacetMerger.Context mcontext;
}
@Override
public void finish(Context mcontext) {
// nothing to do for simple stats...
// base class for facet functions that can be used in a sort
abstract static class FacetSortableMerger extends FacetMerger {
public void prepareSort() {
}
@Override
public void finish(Context mcontext) {
// nothing to do for simple stats...
}
/**
* Return the normal comparison sort order. The sort direction is only to be used in special circumstances (such as making NaN sort
* last regardless of sort order). Normal sorters do not need to pay attention to direction.
*/
public abstract int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction);
}
/** Return the normal comparison sort order. The sort direction is only to be used in special circumstances (such as making NaN sort
* last regardless of sort order). Normal sorters do not need to pay attention to direction.
*/
public abstract int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction);
}
abstract static class FacetDoubleMerger extends FacetSortableMerger {
@Override
public abstract void merge(Object facetResult, Context mcontext);
abstract class FacetDoubleMerger extends FacetSortableMerger {
@Override
public abstract void merge(Object facetResult, Context mcontext);
protected abstract double getDouble();
protected abstract double getDouble();
@Override
public Object getMergedResult() {
return getDouble();
}
@Override
public Object getMergedResult() {
return getDouble();
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
return compare(getDouble(), ((FacetDoubleMerger)other).getDouble(), direction);
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
return compare(getDouble(), ((FacetDoubleMerger) other).getDouble(), direction);
}
public static int compare(double a, double b, FacetRequest.SortDirection direction) {
if (a < b) return -1;
if (a > b) return 1;
public static int compare(double a, double b, FacetRequest.SortDirection direction) {
if (a < b) return -1;
if (a > b) return 1;
if (a != a) { // a==NaN
if (b != b) {
return 0; // both NaN
if (a != a) { // a==NaN
if (b != b) {
return 0; // both NaN
}
return -1 * direction.getMultiplier(); // asc==-1, so this will put NaN at end of sort
}
return -1 * direction.getMultiplier(); // asc==-1, so this will put NaN at end of sort
if (b != b) { // b is NaN so a is greater
return 1 * direction.getMultiplier(); // if sorting asc, make a less so NaN is at end
}
// consider +-0 to be equal
return 0;
}
}
static class FacetLongMerger extends FacetSortableMerger {
long val;
@Override
public void merge(Object facetResult, Context mcontext) {
val += ((Number) facetResult).longValue();
}
if (b != b) { // b is NaN so a is greater
return 1 * direction.getMultiplier(); // if sorting asc, make a less so NaN is at end
@Override
public Object getMergedResult() {
return val;
}
// consider +-0 to be equal
return 0;
}
}
class FacetLongMerger extends FacetSortableMerger {
long val;
@Override
public void merge(Object facetResult, Context mcontext) {
val += ((Number)facetResult).longValue();
}
@Override
public Object getMergedResult() {
return val;
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
return Long.compare(val, ((FacetLongMerger)other).val);
}
}
// base class for facets that create buckets (and can hence have sub-facets)
abstract class FacetBucketMerger<FacetRequestT extends FacetRequest> extends FacetMerger {
FacetRequestT freq;
public FacetBucketMerger(FacetRequestT freq) {
this.freq = freq;
}
/** Bucketval is the representative value for the bucket. Only applicable to terms and range queries to distinguish buckets. */
FacetBucket newBucket(Comparable bucketVal, Context mcontext) {
return new FacetBucket(this, bucketVal, mcontext);
}
@Override
public Map<String, Object> getRefinement(Context mcontext) {
Collection<String> refineTags = mcontext.getSubsWithRefinement(freq);
return null; // FIXME
}
// do subs...
// callback stuff for buckets?
// passing object gives us a chance to specialize based on value
FacetMerger createFacetMerger(String key, Object val) {
FacetRequest sub = freq.getSubFacets().get(key);
if (sub != null) {
return sub.createFacetMerger(val);
}
AggValueSource subStat = freq.getFacetStats().get(key);
if (subStat != null) {
return subStat.createFacetMerger(val);
}
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no merger for key=" + key + " , val=" + val);
}
}
class FacetQueryMerger extends FacetBucketMerger<FacetQuery> {
FacetBucket bucket;
public FacetQueryMerger(FacetQuery freq) {
super(freq);
}
@Override
public void merge(Object facet, Context mcontext) {
if (bucket == null) {
bucket = newBucket(null, mcontext);
}
bucket.mergeBucket((SimpleOrderedMap) facet, mcontext);
}
@Override
public Map<String, Object> getRefinement(Context mcontext) {
Collection<String> tags;
if (mcontext.bucketWasMissing()) {
// if this bucket was missing, we need to get all subfacets that have partials (that need to list values for refinement)
tags = mcontext.getSubsWithPartial(freq);
} else {
tags = mcontext.getSubsWithRefinement(freq);
}
Map<String,Object> refinement = bucket.getRefinement(mcontext, tags);
return refinement;
}
@Override
public void finish(Context mcontext) {
// FIXME we need to propagate!!!
}
@Override
public Object getMergedResult() {
return bucket.getMergedBucket();
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
return Long.compare(val, ((FacetLongMerger) other).val);
}
}
// base class for facets that create buckets (and can hence have sub-facets)
abstract static class FacetBucketMerger<FacetRequestT extends FacetRequest> extends FacetMerger {
FacetRequestT freq;
public FacetBucketMerger(FacetRequestT freq) {
this.freq = freq;
}
/**
* Bucketval is the representative value for the bucket. Only applicable to terms and range queries to distinguish buckets.
*/
FacetBucket newBucket(@SuppressWarnings("rawtypes") Comparable bucketVal, Context mcontext) {
return new FacetBucket(this, bucketVal, mcontext);
}
@Override
public Map<String, Object> getRefinement(Context mcontext) {
Collection<String> refineTags = mcontext.getSubsWithRefinement(freq);
return null; // FIXME
}
// do subs...
// callback stuff for buckets?
// passing object gives us a chance to specialize based on value
FacetMerger createFacetMerger(String key, Object val) {
FacetRequest sub = freq.getSubFacets().get(key);
if (sub != null) {
return sub.createFacetMerger(val);
}
AggValueSource subStat = freq.getFacetStats().get(key);
if (subStat != null) {
return subStat.createFacetMerger(val);
}
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no merger for key=" + key + " , val=" + val);
}
}
static class FacetQueryMerger extends FacetBucketMerger<FacetQuery> {
FacetBucket bucket;
public FacetQueryMerger(FacetQuery freq) {
super(freq);
}
@Override
public void merge(Object facet, Context mcontext) {
if (bucket == null) {
bucket = newBucket(null, mcontext);
}
bucket.mergeBucket((SimpleOrderedMap) facet, mcontext);
}
@Override
public Map<String, Object> getRefinement(Context mcontext) {
Collection<String> tags;
if (mcontext.bucketWasMissing()) {
// if this bucket was missing, we need to get all subfacets that have partials (that need to list values for refinement)
tags = mcontext.getSubsWithPartial(freq);
} else {
tags = mcontext.getSubsWithRefinement(freq);
}
Map<String, Object> refinement = bucket.getRefinement(mcontext, tags);
return refinement;
}
@Override
public void finish(Context mcontext) {
// FIXME we need to propagate!!!
}
@Override
public Object getMergedResult() {
return bucket.getMergedBucket();
}
}
}
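Note (editorial, not part of the patch): the NaN handling in FacetDoubleMerger.compare() above is easy to misread. The following standalone sketch mirrors that logic so it can be run in isolation; the Direction enum is a stand-in for FacetRequest.SortDirection, assuming a -1 multiplier for asc (as the inline comment states) and +1 for desc.

import java.util.Arrays;

public class NaNCompareSketch {
  // Stand-in for FacetRequest.SortDirection; asc==-1 per the inline comment, desc assumed to be +1.
  enum Direction {
    ASC(-1), DESC(1);
    final int multiplier;
    Direction(int m) { this.multiplier = m; }
  }

  // Mirrors the body of FacetDoubleMerger.compare(double, double, SortDirection).
  static int compare(double a, double b, Direction direction) {
    if (a < b) return -1;
    if (a > b) return 1;
    if (a != a) {                                  // a is NaN
      if (b != b) return 0;                        // both NaN
      return -1 * direction.multiplier;            // asc: NaN compares greater, so it sorts last
    }
    if (b != b) return 1 * direction.multiplier;   // b is NaN, a is a real value
    return 0;                                      // +0.0 and -0.0 are treated as equal
  }

  public static void main(String[] args) {
    Double[] vals = {1.0, Double.NaN, 3.0, 2.0};
    Arrays.sort(vals, (x, y) -> compare(x, y, Direction.ASC));
    System.out.println(Arrays.toString(vals)); // [1.0, 2.0, 3.0, NaN] -- NaN lands at the end
  }
}

The direction multiplier only flips the result in the NaN branches, so a descending sort (which reverses comparisons) also leaves NaN buckets at the end.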

View File

@ -0,0 +1,414 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search.facet;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.FunctionQParser;
import org.apache.solr.search.SyntaxError;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
abstract class FacetParser<FacetRequestT extends FacetRequest> {
protected FacetRequestT facet;
protected FacetParser<?> parent;
protected String key;
public FacetParser(FacetParser<?> parent, String key) {
this.parent = parent;
this.key = key;
}
public String getKey() {
return key;
}
public String getPathStr() {
if (parent == null) {
return "/" + key;
}
return parent.getKey() + "/" + key;
}
protected RuntimeException err(String msg) {
return new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg + " , path="+getPathStr());
}
public abstract FacetRequest parse(Object o) throws SyntaxError;
// TODO: put the FacetRequest on the parser object?
public void parseSubs(Object o) throws SyntaxError {
if (o==null) return;
if (o instanceof Map) {
@SuppressWarnings({"unchecked"})
Map<String,Object> m = (Map<String, Object>) o;
for (Map.Entry<String,Object> entry : m.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if ("processEmpty".equals(key)) {
facet.processEmpty = getBoolean(m, "processEmpty", false);
continue;
}
// "my_prices" : { "range" : { "field":...
// key="my_prices", value={"range":..
Object parsedValue = parseFacetOrStat(key, value);
// TODO: have parseFacetOrStat directly add instead of return?
if (parsedValue instanceof FacetRequest) {
facet.addSubFacet(key, (FacetRequest)parsedValue);
} else if (parsedValue instanceof AggValueSource) {
facet.addStat(key, (AggValueSource)parsedValue);
} else {
throw err("Unknown facet type key=" + key + " class=" + (parsedValue == null ? "null" : parsedValue.getClass().getName()));
}
}
} else {
// facet : my_field?
throw err("Expected map for facet/stat");
}
}
public Object parseFacetOrStat(String key, Object o) throws SyntaxError {
if (o instanceof String) {
return parseStringFacetOrStat(key, (String)o);
}
if (!(o instanceof Map)) {
throw err("expected Map but got " + o);
}
// The type can be in a one element map, or inside the args as the "type" field
// { "query" : "foo:bar" }
// { "range" : { "field":... } }
// { "type" : range, field : myfield, ... }
@SuppressWarnings({"unchecked"})
Map<String,Object> m = (Map<String,Object>)o;
String type;
Object args;
if (m.size() == 1) {
Map.Entry<String,Object> entry = m.entrySet().iterator().next();
type = entry.getKey();
args = entry.getValue();
// throw err("expected facet/stat type name, like {range:{... but got " + m);
} else {
// type should be inside the map as a parameter
Object typeObj = m.get("type");
if (!(typeObj instanceof String)) {
throw err("expected facet/stat type name, like {type:range, field:price, ...} but got " + typeObj);
}
type = (String)typeObj;
args = m;
}
return parseFacetOrStat(key, type, args);
}
public Object parseFacetOrStat(String key, String type, Object args) throws SyntaxError {
// TODO: a place to register all these facet types?
switch (type) {
case "field":
case "terms":
return new FacetRequest.FacetFieldParser(this, key).parse(args);
case "query":
return new FacetRequest.FacetQueryParser(this, key).parse(args);
case "range":
return new FacetRangeParser(this, key).parse(args);
case "heatmap":
return new FacetHeatmap.Parser(this, key).parse(args);
case "func":
return parseStat(key, args);
}
throw err("Unknown facet or stat. key=" + key + " type=" + type + " args=" + args);
}
public Object parseStringFacetOrStat(String key, String s) throws SyntaxError {
// "avg(myfield)"
return parseStat(key, s);
// TODO - simple string representation of facets
}
/** Parses simple strings like "avg(x)" in the context of optional local params (may be null) */
private AggValueSource parseStatWithParams(String key, SolrParams localparams, String stat) throws SyntaxError {
SolrQueryRequest req = getSolrRequest();
FunctionQParser parser = new FunctionQParser(stat, localparams, req.getParams(), req);
AggValueSource agg = parser.parseAgg(FunctionQParser.FLAG_DEFAULT);
return agg;
}
/** Parses simple strings like "avg(x)" or robust Maps that may contain local params */
private AggValueSource parseStat(String key, Object args) throws SyntaxError {
assert null != args;
if (args instanceof CharSequence) {
// Both of these variants are already unpacked for us in this case, and use no local params...
// 1) x:{func:'min(foo)'}
// 2) x:'min(foo)'
return parseStatWithParams(key, null, args.toString());
}
if (args instanceof Map) {
@SuppressWarnings({"unchecked"})
final Map<String,Object> statMap = (Map<String,Object>)args;
return parseStatWithParams(key, jsonToSolrParams(statMap), statMap.get("func").toString());
}
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Stats must be specified as either a simple string, or a json Map");
}
private FacetRequest.Domain getDomain() {
if (facet.domain == null) {
facet.domain = new FacetRequest.Domain();
}
return facet.domain;
}
protected void parseCommonParams(Object o) {
if (o instanceof Map) {
@SuppressWarnings({"unchecked"})
Map<String,Object> m = (Map<String,Object>)o;
List<String> excludeTags = getStringList(m, "excludeTags");
if (excludeTags != null) {
getDomain().excludeTags = excludeTags;
}
Object domainObj = m.get("domain");
if (domainObj instanceof Map) {
@SuppressWarnings({"unchecked"})
Map<String, Object> domainMap = (Map<String, Object>)domainObj;
FacetRequest.Domain domain = getDomain();
excludeTags = getStringList(domainMap, "excludeTags");
if (excludeTags != null) {
domain.excludeTags = excludeTags;
}
if (domainMap.containsKey("query")) {
domain.explicitQueries = parseJSONQueryStruct(domainMap.get("query"));
if (null == domain.explicitQueries) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"'query' domain can not be null or empty");
} else if (null != domain.excludeTags) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"'query' domain can not be combined with 'excludeTags'");
}
}
String blockParent = getString(domainMap, "blockParent", null);
String blockChildren = getString(domainMap, "blockChildren", null);
if (blockParent != null) {
domain.toParent = true;
domain.parents = blockParent;
} else if (blockChildren != null) {
domain.toChildren = true;
domain.parents = blockChildren;
}
FacetRequest.Domain.JoinField.createJoinField(domain, domainMap);
FacetRequest.Domain.GraphField.createGraphField(domain, domainMap);
Object filterOrList = domainMap.get("filter");
if (filterOrList != null) {
assert domain.filters == null;
domain.filters = parseJSONQueryStruct(filterOrList);
}
} else if (domainObj != null) {
throw err("Expected Map for 'domain', received " + domainObj.getClass().getSimpleName() + "=" + domainObj);
}
}
}
/** returns null on null input, otherwise returns a list of the JSON query structures -- either
* directly from the raw (list) input, or if the raw input is not a list then it encapsulates
* it in a new list.
*/
@SuppressWarnings({"unchecked"})
private List<Object> parseJSONQueryStruct(Object raw) {
List<Object> result = null;
if (null == raw) {
return result;
} else if (raw instanceof List) {
result = (List<Object>) raw;
} else {
result = new ArrayList<>(1);
result.add(raw);
}
return result;
}
public String getField(Map<String,Object> args) {
Object fieldName = args.get("field"); // TODO: pull out into defined constant
if (fieldName == null) {
fieldName = args.get("f"); // short form
}
if (fieldName == null) {
throw err("Missing 'field'");
}
if (!(fieldName instanceof String)) {
throw err("Expected string for 'field', got" + fieldName);
}
return (String)fieldName;
}
public Long getLongOrNull(Map<String,Object> args, String paramName, boolean required) {
Object o = args.get(paramName);
if (o == null) {
if (required) {
throw err("Missing required parameter '" + paramName + "'");
}
return null;
}
if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
throw err("Expected integer type for param '"+paramName + "' but got " + o);
}
return ((Number)o).longValue();
}
public long getLong(Map<String,Object> args, String paramName, long defVal) {
Object o = args.get(paramName);
if (o == null) {
return defVal;
}
if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
throw err("Expected integer type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return ((Number)o).longValue();
}
public Double getDoubleOrNull(Map<String,Object> args, String paramName, boolean required) {
Object o = args.get(paramName);
if (o == null) {
if (required) {
throw err("Missing required parameter '" + paramName + "'");
}
return null;
}
if (!(o instanceof Number)) {
throw err("Expected double type for param '" + paramName + "' but got " + o);
}
return ((Number)o).doubleValue();
}
public boolean getBoolean(Map<String,Object> args, String paramName, boolean defVal) {
Object o = args.get(paramName);
if (o == null) {
return defVal;
}
// TODO: should we be more flexible and accept things like "true" (strings)?
// Perhaps wait until the use case comes up.
if (!(o instanceof Boolean)) {
throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return (Boolean)o;
}
public Boolean getBooleanOrNull(Map<String, Object> args, String paramName) {
Object o = args.get(paramName);
if (o != null && !(o instanceof Boolean)) {
throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return (Boolean) o;
}
public String getString(Map<String,Object> args, String paramName, String defVal) {
Object o = args.get(paramName);
if (o == null) {
return defVal;
}
if (!(o instanceof String)) {
throw err("Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return (String)o;
}
public Object getVal(Map<String, Object> args, String paramName, boolean required) {
Object o = args.get(paramName);
if (o == null && required) {
throw err("Missing required parameter: '" + paramName + "'");
}
return o;
}
public List<String> getStringList(Map<String,Object> args, String paramName) {
return getStringList(args, paramName, true);
}
@SuppressWarnings({"unchecked"})
public List<String> getStringList(Map<String, Object> args, String paramName, boolean decode) {
Object o = args.get(paramName);
if (o == null) {
return null;
}
if (o instanceof List) {
return (List<String>)o;
}
if (o instanceof String) {
// TODO: SOLR-12539 handle spaces in b/w comma & value ie, should the values be trimmed before returning??
return StrUtils.splitSmart((String)o, ",", decode);
}
throw err("Expected list of string or comma separated string values for '" + paramName +
"', received " + o.getClass().getSimpleName() + "=" + o);
}
public IndexSchema getSchema() {
return parent.getSchema();
}
public SolrQueryRequest getSolrRequest() {
return parent.getSolrRequest();
}
/**
* Helper that handles the possibility of map values being lists
* NOTE: does *NOT* fail on map values that are sub-maps (ie: nested json objects)
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public static SolrParams jsonToSolrParams(Map jsonObject) {
// HACK, but NamedList already handles the list processing for us...
NamedList<String> nl = new NamedList<>();
nl.addAll(jsonObject);
return SolrParams.toSolrParams(nl);
}
}
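For illustration only (not from this commit): the three equivalent shapes that FacetParser.parseFacetOrStat() accepts, expressed as the plain Java maps a parsed json.facet body would produce. The key and field names below are made up.

import java.util.LinkedHashMap;
import java.util.Map;

public class FacetShapeSketch {
  public static void main(String[] args) {
    // 1) one-element map keyed by the facet type:
    //    "prices" : { "range" : { "field":"price", "start":0, "end":100, "gap":10 } }
    Map<String, Object> rangeArgs = new LinkedHashMap<>();
    rangeArgs.put("field", "price");
    rangeArgs.put("start", 0L);
    rangeArgs.put("end", 100L);
    rangeArgs.put("gap", 10L);
    Map<String, Object> typeAsKey = Map.of("prices", Map.of("range", rangeArgs));

    // 2) the type carried inside the args as a "type" entry:
    //    "prices" : { "type":"range", "field":"price", "start":0, "end":100, "gap":10 }
    Map<String, Object> typeInArgs = new LinkedHashMap<>(rangeArgs);
    typeInArgs.put("type", "range");

    // 3) a bare string, handed to parseStringFacetOrStat() and treated as a stat:
    //    "avg_price" : "avg(price)"
    Map<String, Object> statAsString = Map.of("avg_price", "avg(price)");

    System.out.println(typeAsKey);
    System.out.println(Map.of("prices", typeInArgs));
    System.out.println(statAsString);
  }
}

In Solr these maps arrive via FacetRequest.parse(req, params); the sketch only builds and prints them.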

View File

@ -47,21 +47,21 @@ import org.apache.solr.search.facet.SlotAcc.SlotContext;
/** Base abstraction for a class that computes facets. This is fairly internal to the module. */
public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
SimpleOrderedMap<Object> response;
FacetRequest.FacetContext fcontext;
FacetContext fcontext;
FacetRequestT freq;
DocSet filter; // additional filters specified by "filter" // TODO: do these need to be on the context to support recomputing during multi-select?
LinkedHashMap<String,SlotAcc> accMap;
SlotAcc[] accs;
CountSlotAcc countAcc;
SlotAcc.CountSlotAcc countAcc;
FacetProcessor(FacetRequest.FacetContext fcontext, FacetRequestT freq) {
FacetProcessor(FacetContext fcontext, FacetRequestT freq) {
this.fcontext = fcontext;
this.freq = freq;
fcontext.processor = this;
}
public Object getResponse() {
public org.apache.solr.common.MapWriter getResponse() {
return response;
}
@ -74,7 +74,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
this.filter = fcontext.searcher.getDocSet(evalJSONFilterQueryStruct(fcontext, freq.domain.filters));
}
private static List<Query> evalJSONFilterQueryStruct(FacetRequest.FacetContext fcontext, List<Object> filters) throws IOException {
private static List<Query> evalJSONFilterQueryStruct(FacetContext fcontext, List<Object> filters) throws IOException {
List<Query> qlist = new ArrayList<>(filters.size());
// TODO: prevent parsing filters each time!
for (Object rawFilter : filters) {
@ -82,6 +82,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
qlist.add(parserFilter((String) rawFilter, fcontext.req));
} else if (rawFilter instanceof Map) {
@SuppressWarnings({"unchecked"})
Map<String,Object> m = (Map<String, Object>) rawFilter;
String type;
Object args;
@ -181,6 +182,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
return;
}
@SuppressWarnings({"rawtypes"})
Map tagMap = (Map) fcontext.req.getContext().get("tags");
if (tagMap == null) {
// no filters were tagged
@ -226,7 +228,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
// now walk back up the context tree
// TODO: we lose parent exclusions...
for (FacetRequest.FacetContext curr = fcontext; curr != null; curr = curr.parent) {
for (FacetContext curr = fcontext; curr != null; curr = curr.parent) {
if (curr.filter != null) {
qlist.add( curr.filter );
}
@ -307,7 +309,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
// allow a custom count acc to be used
if (countAcc == null) {
countAcc = new CountSlotArrAcc(fcontext, slotCount);
countAcc = new SlotAcc.CountSlotArrAcc(fcontext, slotCount);
countAcc.key = "count";
}
@ -438,6 +440,7 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
}
}
@SuppressWarnings({"unchecked"})
void processSubs(SimpleOrderedMap<Object> response, Query filter, DocSet domain, boolean skip, Map<String,Object> facetInfo) throws IOException {
boolean emptyDomain = domain == null || domain.size() == 0;
@ -462,9 +465,9 @@ public abstract class FacetProcessor<FacetRequestT extends FacetRequest> {
if (skip && facetInfoSub == null) continue;
// make a new context for each sub-facet since they can change the domain
FacetRequest.FacetContext subContext = fcontext.sub(filter, domain);
FacetContext subContext = fcontext.sub(filter, domain);
subContext.facetInfo = facetInfoSub;
if (!skip) subContext.flags &= ~FacetRequest.FacetContext.SKIP_FACET; // turn off the skip flag if we're not skipping this bucket
if (!skip) subContext.flags &= ~FacetContext.SKIP_FACET; // turn off the skip flag if we're not skipping this bucket
if (fcontext.getDebugInfo() != null) { // if fcontext.debugInfo != null, it means rb.debug() == true
FacetDebugInfo fdebug = new FacetDebugInfo();

View File

@ -27,6 +27,7 @@ public class FacetQuery extends FacetRequest {
// query string or query?
Query q;
@SuppressWarnings("rawtypes")
@Override
public FacetProcessor createFacetProcessor(FacetContext fcontext) {
return new FacetQueryProcessor(fcontext, this);
@ -34,7 +35,7 @@ public class FacetQuery extends FacetRequest {
@Override
public FacetMerger createFacetMerger(Object prototype) {
return new FacetQueryMerger(this);
return new FacetModule.FacetQueryMerger(this);
}
@Override
@ -49,7 +50,7 @@ public class FacetQuery extends FacetRequest {
class FacetQueryProcessor extends FacetProcessor<FacetQuery> {
FacetQueryProcessor(FacetRequest.FacetContext fcontext, FacetQuery freq) {
FacetQueryProcessor(FacetContext fcontext, FacetQuery freq) {
super(fcontext, freq);
}
@ -61,7 +62,7 @@ class FacetQueryProcessor extends FacetProcessor<FacetQuery> {
// FIXME - what needs to be done here?
}
response = new SimpleOrderedMap<>();
fillBucket(response, freq.q, null, (fcontext.flags & FacetRequest.FacetContext.SKIP_FACET)!=0, fcontext.facetInfo);
fillBucket(response, freq.q, null, (fcontext.flags & FacetContext.SKIP_FACET)!=0, fcontext.facetInfo);
}
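For illustration only (not part of this commit): a query facet can be supplied either as a bare query string or as a map carrying a "q" entry (see FacetQueryParser later in this diff). The names below are hypothetical.

import java.util.Map;

public class QueryFacetShapeSketch {
  public static void main(String[] args) {
    // 1) bare query string:  "in_stock" : { "query" : "inStock:true" }
    Map<String, Object> bareString = Map.of("in_stock", Map.of("query", "inStock:true"));

    // 2) map form with "q", which leaves room for sub-facets under "facet":
    //    "in_stock" : { "type":"query", "q":"inStock:true", "facet": { "avg_price":"avg(price)" } }
    Map<String, Object> mapForm = Map.of("in_stock", Map.of(
        "type", "query",
        "q", "inStock:true",
        "facet", Map.of("avg_price", "avg(price)")));

    System.out.println(bareString);
    System.out.println(mapForm);
  }
}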

File diff suppressed because it is too large Load Diff

View File

@ -89,7 +89,7 @@ public class FacetRangeMerger extends FacetRequestSortedMerger<FacetRange> {
return refinement;
}
public void merge(SimpleOrderedMap facetResult, Context mcontext) {
public void merge(@SuppressWarnings("rawtypes") SimpleOrderedMap facetResult, Context mcontext) {
boolean all = freq.others.contains(FacetParams.FacetRangeOther.ALL);
if (all || freq.others.contains(FacetParams.FacetRangeOther.BEFORE)) {
@ -131,12 +131,14 @@ public class FacetRangeMerger extends FacetRequestSortedMerger<FacetRange> {
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
List<SimpleOrderedMap> bucketList = (List<SimpleOrderedMap>) facetResult.get("buckets");
mergeBucketList(bucketList , mcontext);
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getMergedResult() {
// TODO: use sortedBuckets
SimpleOrderedMap result = new SimpleOrderedMap(4);

View File

@ -0,0 +1,76 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search.facet;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.search.SyntaxError;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
class FacetRangeParser extends FacetParser<FacetRange> {
@SuppressWarnings({"rawtypes"})
public FacetRangeParser(FacetParser parent, String key) {
super(parent, key);
facet = new FacetRange();
}
public FacetRange parse(Object arg) throws SyntaxError {
parseCommonParams(arg);
if (!(arg instanceof Map)) {
throw err("Missing range facet arguments");
}
@SuppressWarnings({"unchecked"})
Map<String, Object> m = (Map<String, Object>) arg;
facet.field = getString(m, "field", null);
facet.ranges = getVal(m, "ranges", false);
boolean required = facet.ranges == null;
facet.start = getVal(m, "start", required);
facet.end = getVal(m, "end", required);
facet.gap = getVal(m, "gap", required);
facet.hardend = getBoolean(m, "hardend", facet.hardend);
facet.mincount = getLong(m, "mincount", 0);
// TODO: refactor list-of-options code
List<String> list = getStringList(m, "include", false);
String[] includeList = null;
if (list != null) {
includeList = list.toArray(new String[list.size()]);
}
facet.include = FacetParams.FacetRangeInclude.parseParam( includeList );
facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class);
List<String> other = getStringList(m, "other", false);
if (other != null) {
for (String otherStr : other) {
facet.others.add( FacetParams.FacetRangeOther.get(otherStr) );
}
}
Object facetObj = m.get("facet");
parseSubs(facetObj);
return facet;
}
}
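For illustration only (not part of this commit): in the parse() method above, "start", "end" and "gap" are required exactly when no "ranges" value is supplied. Below is a sketch of both shapes as plain maps; the field names are hypothetical, and the inner structure of each "ranges" entry is an assumption, since this parser treats that value as opaque.

import java.util.List;
import java.util.Map;

public class RangeFacetShapeSketch {
  public static void main(String[] args) {
    // start/end/gap form -- all three are required because "ranges" is absent:
    Map<String, Object> gapForm = Map.of(
        "type", "range", "field", "price",
        "start", 0, "end", 100, "gap", 20,
        "hardend", true, "other", "all");

    // explicit "ranges" form -- start/end/gap may then be omitted
    // (the entry shape below is illustrative only):
    Map<String, Object> rangesForm = Map.of(
        "type", "range", "field", "price",
        "ranges", List.of(Map.of("from", 0, "to", 50), Map.of("from", 50, "to", 100)));

    System.out.println(gapForm);
    System.out.println(rangesForm);
  }
}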

File diff suppressed because it is too large Load Diff

View File

@ -17,8 +17,6 @@
package org.apache.solr.search.facet;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@ -27,19 +25,14 @@ import java.util.Optional;
import org.apache.lucene.search.Query;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.FunctionQParser;
import org.apache.solr.search.JoinQParserPlugin;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QueryContext;
import org.apache.solr.search.SolrConstantScoreQuery;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.search.join.GraphQuery;
import org.apache.solr.search.join.GraphQueryParser;
@ -203,6 +196,7 @@ public abstract class FacetRequest {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"'join' domain change requires a map containing the 'from' and 'to' fields");
}
@SuppressWarnings({"unchecked"})
final Map<String,String> join = (Map<String,String>) queryJoin;
if (! (join.containsKey("from") && join.containsKey("to") &&
null != join.get("from") && null != join.get("to")) ) {
@ -264,6 +258,7 @@ public abstract class FacetRequest {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"'graph' domain change requires a map containing the 'from' and 'to' fields");
}
@SuppressWarnings({"unchecked"})
final Map<String,String> graph = (Map<String,String>) queryGraph;
if (! (graph.containsKey("from") && graph.containsKey("to") &&
null != graph.get("from") && null != graph.get("to")) ) {
@ -306,6 +301,7 @@ public abstract class FacetRequest {
* @param params a typed parameter structure (unlike SolrParams which are all string values).
*/
public static FacetRequest parse(SolrQueryRequest req, Map<String, Object> params) {
@SuppressWarnings({"rawtypes"})
FacetParser parser = new FacetTopParser(req);
try {
return parser.parse(params);
@ -324,6 +320,7 @@ public abstract class FacetRequest {
* @param params a typed parameter structure (unlike SolrParams which are all string values).
*/
public static FacetRequest parseOneFacetReq(SolrQueryRequest req, Map<String, Object> params) {
@SuppressWarnings("rawtypes")
FacetParser parser = new FacetTopParser(req);
try {
return (FacetRequest) parser.parseFacetOrStat("", params);
@ -409,6 +406,7 @@ public abstract class FacetRequest {
/** Process the request with the facet context settings, a parameter-object. */
final Object process(FacetContext fcontext) throws IOException {
@SuppressWarnings("rawtypes")
FacetProcessor facetProcessor = createFacetProcessor(fcontext);
FacetDebugInfo debugInfo = fcontext.getDebugInfo();
@ -432,437 +430,13 @@ public abstract class FacetRequest {
return facetProcessor.getResponse();
}
@SuppressWarnings("rawtypes")
public abstract FacetProcessor createFacetProcessor(FacetContext fcontext);
public abstract FacetMerger createFacetMerger(Object prototype);
public abstract Map<String, Object> getFacetDescription();
static class FacetContext {
// Context info for actually executing a local facet command
public static final int IS_SHARD=0x01;
public static final int IS_REFINEMENT=0x02;
public static final int SKIP_FACET=0x04; // refinement: skip calculating this immediate facet, but proceed to specific sub-facets based on facetInfo
FacetProcessor processor;
Map<String,Object> facetInfo; // refinement info for this node
QueryContext qcontext;
SolrQueryRequest req; // TODO: replace with params?
SolrIndexSearcher searcher;
Query filter; // TODO: keep track of as a DocSet or as a Query?
DocSet base;
FacetContext parent;
int flags;
FacetDebugInfo debugInfo;
public void setDebugInfo(FacetDebugInfo debugInfo) {
this.debugInfo = debugInfo;
}
public FacetDebugInfo getDebugInfo() {
return debugInfo;
}
public boolean isShard() {
return (flags & IS_SHARD) != 0;
}
/**
* @param filter The filter for the bucket that resulted in this context/domain. Can be null if this is the root context.
* @param domain The resulting set of documents for this facet.
*/
public FacetContext sub(Query filter, DocSet domain) {
FacetContext ctx = new FacetContext();
ctx.parent = this;
ctx.base = domain;
ctx.filter = filter;
// carry over from parent
ctx.flags = flags;
ctx.qcontext = qcontext;
ctx.req = req;
ctx.searcher = searcher;
return ctx;
}
}
abstract static class FacetParser<FacetRequestT extends FacetRequest> {
protected FacetRequestT facet;
protected FacetParser parent;
protected String key;
public FacetParser(FacetParser parent, String key) {
this.parent = parent;
this.key = key;
}
public String getKey() {
return key;
}
public String getPathStr() {
if (parent == null) {
return "/" + key;
}
return parent.getKey() + "/" + key;
}
protected RuntimeException err(String msg) {
return new SolrException(SolrException.ErrorCode.BAD_REQUEST, msg + " , path="+getPathStr());
}
public abstract FacetRequest parse(Object o) throws SyntaxError;
// TODO: put the FacetRequest on the parser object?
public void parseSubs(Object o) throws SyntaxError {
if (o==null) return;
if (o instanceof Map) {
Map<String,Object> m = (Map<String, Object>) o;
for (Map.Entry<String,Object> entry : m.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
if ("processEmpty".equals(key)) {
facet.processEmpty = getBoolean(m, "processEmpty", false);
continue;
}
// "my_prices" : { "range" : { "field":...
// key="my_prices", value={"range":..
Object parsedValue = parseFacetOrStat(key, value);
// TODO: have parseFacetOrStat directly add instead of return?
if (parsedValue instanceof FacetRequest) {
facet.addSubFacet(key, (FacetRequest)parsedValue);
} else if (parsedValue instanceof AggValueSource) {
facet.addStat(key, (AggValueSource)parsedValue);
} else {
throw err("Unknown facet type key=" + key + " class=" + (parsedValue == null ? "null" : parsedValue.getClass().getName()));
}
}
} else {
// facet : my_field?
throw err("Expected map for facet/stat");
}
}
public Object parseFacetOrStat(String key, Object o) throws SyntaxError {
if (o instanceof String) {
return parseStringFacetOrStat(key, (String)o);
}
if (!(o instanceof Map)) {
throw err("expected Map but got " + o);
}
// The type can be in a one element map, or inside the args as the "type" field
// { "query" : "foo:bar" }
// { "range" : { "field":... } }
// { "type" : range, field : myfield, ... }
Map<String,Object> m = (Map<String,Object>)o;
String type;
Object args;
if (m.size() == 1) {
Map.Entry<String,Object> entry = m.entrySet().iterator().next();
type = entry.getKey();
args = entry.getValue();
// throw err("expected facet/stat type name, like {range:{... but got " + m);
} else {
// type should be inside the map as a parameter
Object typeObj = m.get("type");
if (!(typeObj instanceof String)) {
throw err("expected facet/stat type name, like {type:range, field:price, ...} but got " + typeObj);
}
type = (String)typeObj;
args = m;
}
return parseFacetOrStat(key, type, args);
}
public Object parseFacetOrStat(String key, String type, Object args) throws SyntaxError {
// TODO: a place to register all these facet types?
switch (type) {
case "field":
case "terms":
return new FacetFieldParser(this, key).parse(args);
case "query":
return new FacetQueryParser(this, key).parse(args);
case "range":
return new FacetRangeParser(this, key).parse(args);
case "heatmap":
return new FacetHeatmap.Parser(this, key).parse(args);
case "func":
return parseStat(key, args);
}
throw err("Unknown facet or stat. key=" + key + " type=" + type + " args=" + args);
}
public Object parseStringFacetOrStat(String key, String s) throws SyntaxError {
// "avg(myfield)"
return parseStat(key, s);
// TODO - simple string representation of facets
}
/** Parses simple strings like "avg(x)" in the context of optional local params (may be null) */
private AggValueSource parseStatWithParams(String key, SolrParams localparams, String stat) throws SyntaxError {
SolrQueryRequest req = getSolrRequest();
FunctionQParser parser = new FunctionQParser(stat, localparams, req.getParams(), req);
AggValueSource agg = parser.parseAgg(FunctionQParser.FLAG_DEFAULT);
return agg;
}
/** Parses simple strings like "avg(x)" or robust Maps that may contain local params */
private AggValueSource parseStat(String key, Object args) throws SyntaxError {
assert null != args;
if (args instanceof CharSequence) {
// Both of these variants are already unpacked for us in this case, and use no local params...
// 1) x:{func:'min(foo)'}
// 2) x:'min(foo)'
return parseStatWithParams(key, null, args.toString());
}
if (args instanceof Map) {
final Map<String,Object> statMap = (Map<String,Object>)args;
return parseStatWithParams(key, jsonToSolrParams(statMap), statMap.get("func").toString());
}
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"Stats must be specified as either a simple string, or a json Map");
}
private Domain getDomain() {
if (facet.domain == null) {
facet.domain = new Domain();
}
return facet.domain;
}
protected void parseCommonParams(Object o) {
if (o instanceof Map) {
Map<String,Object> m = (Map<String,Object>)o;
List<String> excludeTags = getStringList(m, "excludeTags");
if (excludeTags != null) {
getDomain().excludeTags = excludeTags;
}
Object domainObj = m.get("domain");
if (domainObj instanceof Map) {
Map<String, Object> domainMap = (Map<String, Object>)domainObj;
Domain domain = getDomain();
excludeTags = getStringList(domainMap, "excludeTags");
if (excludeTags != null) {
domain.excludeTags = excludeTags;
}
if (domainMap.containsKey("query")) {
domain.explicitQueries = parseJSONQueryStruct(domainMap.get("query"));
if (null == domain.explicitQueries) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"'query' domain can not be null or empty");
} else if (null != domain.excludeTags) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"'query' domain can not be combined with 'excludeTags'");
}
}
String blockParent = getString(domainMap, "blockParent", null);
String blockChildren = getString(domainMap, "blockChildren", null);
if (blockParent != null) {
domain.toParent = true;
domain.parents = blockParent;
} else if (blockChildren != null) {
domain.toChildren = true;
domain.parents = blockChildren;
}
Domain.JoinField.createJoinField(domain, domainMap);
Domain.GraphField.createGraphField(domain, domainMap);
Object filterOrList = domainMap.get("filter");
if (filterOrList != null) {
assert domain.filters == null;
domain.filters = parseJSONQueryStruct(filterOrList);
}
} else if (domainObj != null) {
throw err("Expected Map for 'domain', received " + domainObj.getClass().getSimpleName() + "=" + domainObj);
}
}
}
/** returns null on null input, otherwise returns a list of the JSON query structures -- either
* directly from the raw (list) input, or if the raw input is not a list then it encapsulates
* it in a new list.
*/
private List<Object> parseJSONQueryStruct(Object raw) {
List<Object> result = null;
if (null == raw) {
return result;
} else if (raw instanceof List) {
result = (List<Object>) raw;
} else {
result = new ArrayList<>(1);
result.add(raw);
}
return result;
}
public String getField(Map<String,Object> args) {
Object fieldName = args.get("field"); // TODO: pull out into defined constant
if (fieldName == null) {
fieldName = args.get("f"); // short form
}
if (fieldName == null) {
throw err("Missing 'field'");
}
if (!(fieldName instanceof String)) {
throw err("Expected string for 'field', got" + fieldName);
}
return (String)fieldName;
}
public Long getLongOrNull(Map<String,Object> args, String paramName, boolean required) {
Object o = args.get(paramName);
if (o == null) {
if (required) {
throw err("Missing required parameter '" + paramName + "'");
}
return null;
}
if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
throw err("Expected integer type for param '"+paramName + "' but got " + o);
}
return ((Number)o).longValue();
}
public long getLong(Map<String,Object> args, String paramName, long defVal) {
Object o = args.get(paramName);
if (o == null) {
return defVal;
}
if (!(o instanceof Long || o instanceof Integer || o instanceof Short || o instanceof Byte)) {
throw err("Expected integer type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return ((Number)o).longValue();
}
public Double getDoubleOrNull(Map<String,Object> args, String paramName, boolean required) {
Object o = args.get(paramName);
if (o == null) {
if (required) {
throw err("Missing required parameter '" + paramName + "'");
}
return null;
}
if (!(o instanceof Number)) {
throw err("Expected double type for param '" + paramName + "' but got " + o);
}
return ((Number)o).doubleValue();
}
public boolean getBoolean(Map<String,Object> args, String paramName, boolean defVal) {
Object o = args.get(paramName);
if (o == null) {
return defVal;
}
// TODO: should we be more flexible and accept things like "true" (strings)?
// Perhaps wait until the use case comes up.
if (!(o instanceof Boolean)) {
throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return (Boolean)o;
}
public Boolean getBooleanOrNull(Map<String, Object> args, String paramName) {
Object o = args.get(paramName);
if (o != null && !(o instanceof Boolean)) {
throw err("Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return (Boolean) o;
}
public String getString(Map<String,Object> args, String paramName, String defVal) {
Object o = args.get(paramName);
if (o == null) {
return defVal;
}
if (!(o instanceof String)) {
throw err("Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
}
return (String)o;
}
public Object getVal(Map<String, Object> args, String paramName, boolean required) {
Object o = args.get(paramName);
if (o == null && required) {
throw err("Missing required parameter: '" + paramName + "'");
}
return o;
}
public List<String> getStringList(Map<String,Object> args, String paramName) {
return getStringList(args, paramName, true);
}
public List<String> getStringList(Map<String, Object> args, String paramName, boolean decode) {
Object o = args.get(paramName);
if (o == null) {
return null;
}
if (o instanceof List) {
return (List<String>)o;
}
if (o instanceof String) {
// TODO: SOLR-12539 handle spaces in b/w comma & value ie, should the values be trimmed before returning??
return StrUtils.splitSmart((String)o, ",", decode);
}
throw err("Expected list of string or comma separated string values for '" + paramName +
"', received " + o.getClass().getSimpleName() + "=" + o);
}
public IndexSchema getSchema() {
return parent.getSchema();
}
public SolrQueryRequest getSolrRequest() {
return parent.getSolrRequest();
}
/**
* Helper that handles the possibility of map values being lists
* NOTE: does *NOT* fail on map values that are sub-maps (ie: nested json objects)
*/
public static SolrParams jsonToSolrParams(Map jsonObject) {
// HACK, but NamedList already handles the list processing for us...
NamedList<String> nl = new NamedList<>();
nl.addAll(jsonObject);
return SolrParams.toSolrParams(nl);
}
}
static class FacetTopParser extends FacetParser<FacetQuery> {
private SolrQueryRequest req;
@ -890,7 +464,7 @@ public abstract class FacetRequest {
}
static class FacetQueryParser extends FacetParser<FacetQuery> {
public FacetQueryParser(FacetParser parent, String key) {
public FacetQueryParser(@SuppressWarnings("rawtypes") FacetParser parent, String key) {
super(parent, key);
facet = new FacetQuery();
}
@ -905,6 +479,7 @@ public abstract class FacetRequest {
qstring = (String)arg;
} else if (arg instanceof Map) {
@SuppressWarnings({"unchecked"})
Map<String, Object> m = (Map<String, Object>) arg;
qstring = getString(m, "q", null);
if (qstring == null) {
@ -959,6 +534,7 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
***/
static class FacetFieldParser extends FacetParser<FacetField> {
@SuppressWarnings({"rawtypes"})
public FacetFieldParser(FacetParser parent, String key) {
super(parent, key);
facet = new FacetField();
@ -971,6 +547,7 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
facet.field = (String)arg;
} else if (arg instanceof Map) {
@SuppressWarnings({"unchecked"})
Map<String, Object> m = (Map<String, Object>) arg;
facet.field = getField(m);
facet.offset = getLong(m, "offset", facet.offset);
@ -1049,6 +626,7 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
}
} else if (sort instanceof Map) {
// { myvar : 'desc' }
@SuppressWarnings("unchecked")
Optional<Map.Entry<String,Object>> optional = ((Map<String,Object>)sort).entrySet().stream().findFirst();
if (optional.isPresent()) {
Map.Entry<String, Object> entry = optional.get();
@ -1075,55 +653,6 @@ static class FacetBlockParentParser extends FacetParser<FacetBlockParent> {
}
static class FacetRangeParser extends FacetParser<FacetRange> {
public FacetRangeParser(FacetParser parent, String key) {
super(parent, key);
facet = new FacetRange();
}
public FacetRange parse(Object arg) throws SyntaxError {
parseCommonParams(arg);
if (!(arg instanceof Map)) {
throw err("Missing range facet arguments");
}
Map<String, Object> m = (Map<String, Object>) arg;
facet.field = getString(m, "field", null);
facet.ranges = getVal(m, "ranges", false);
boolean required = facet.ranges == null;
facet.start = getVal(m, "start", required);
facet.end = getVal(m, "end", required);
facet.gap = getVal(m, "gap", required);
facet.hardend = getBoolean(m, "hardend", facet.hardend);
facet.mincount = getLong(m, "mincount", 0);
// TODO: refactor list-of-options code
List<String> list = getStringList(m, "include", false);
String[] includeList = null;
if (list != null) {
includeList = list.toArray(new String[list.size()]);
}
facet.include = FacetParams.FacetRangeInclude.parseParam( includeList );
facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class);
List<String> other = getStringList(m, "other", false);
if (other != null) {
for (String otherStr : other) {
facet.others.add( FacetParams.FacetRangeOther.get(otherStr) );
}
}
Object facetObj = m.get("facet");
parseSubs(facetObj);
return facet;
}
}
}
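For illustration only (not part of this commit): parseJSONQueryStruct() wraps a single value into a one-element list, so the two domain filters below are handled identically; note also that the parsing code above rejects a "query" domain combined with "excludeTags". Field names are hypothetical.

import java.util.List;
import java.util.Map;

public class DomainFilterShapeSketch {
  public static void main(String[] args) {
    // single filter string:
    Map<String, Object> single = Map.of("domain", Map.of("filter", "inStock:true"));

    // equivalent list form (each entry may also be a full query map):
    Map<String, Object> asList = Map.of("domain",
        Map.of("filter", List.of("inStock:true")));

    System.out.println(single);
    System.out.println(asList);
  }
}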

View File

@ -0,0 +1,58 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search.facet;
// Any type of facet request that generates a variable number of buckets
// and supports sorting by those generated buckets.
abstract class FacetRequestSorted extends FacetRequest {
long offset;
long limit;
/**
* Number of extra buckets to request internally, beyond the limit, during the initial distributed search.
* -1 means default heuristic.
*/
int overrequest = -1;
/**
* Number of extra buckets to fill in internally, beyond the limit, during refinement of a distributed search.
* -1 means default heuristic.
*/
int overrefine = -1;
long mincount;
/**
* The basic sorting to do on buckets, defaults to {@link FacetRequest.FacetSort#COUNT_DESC}
* @see #prelim_sort
*/
FacetSort sort;
/**
* An optional "Pre-Sort" that defaults to null.
* If specified, then the <code>prelim_sort</code> is used as an optimization in place of {@link #sort}
* during collection, and the full {@link #sort} values are only computed for the top candidate buckets
* (after refinement)
*/
FacetSort prelim_sort;
RefineMethod refine; // null, NONE, or SIMPLE
@Override
public RefineMethod getRefineMethod() {
return refine;
}
@Override
public boolean returnsPartial() {
return super.returnsPartial() || (limit > 0);
}
}
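For illustration only (not part of this commit): a request exercising the sort / prelim_sort pair documented above, built as a plain map. The field and stat names are hypothetical; the idea is that buckets are collected under the cheap prelim_sort ordering, and the more expensive sort is only computed for the surviving top buckets.

import java.util.Map;

public class PrelimSortSketch {
  public static void main(String[] args) {
    Map<String, Object> categories = Map.of("categories", Map.of(
        "type", "terms",
        "field", "category",
        "limit", 10,
        "overrequest", 20,             // extra buckets requested from each shard up front
        "prelim_sort", "count desc",   // cheap ordering used while collecting
        "sort", "avg_price desc",      // final ordering, computed only for the top candidates
        "facet", Map.of("avg_price", "avg(price)")));
    System.out.println(categories);
  }
}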

View File

@ -31,7 +31,7 @@ import java.util.Map;
import org.apache.solr.common.util.SimpleOrderedMap;
// base class for facets that create a list of buckets that can be sorted
abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted> extends FacetBucketMerger<FacetRequestT> {
abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted> extends FacetModule.FacetBucketMerger<FacetRequestT> {
LinkedHashMap<Object,FacetBucket> buckets = new LinkedHashMap<>();
List<FacetBucket> sortedBuckets;
BitSet shardHasMoreBuckets; // null, or "true" if we saw a result from this shard and it indicated that there are more results
@ -44,6 +44,7 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
@Override
public void merge(Object facetResult, Context mcontext) {
this.mcontext = mcontext;
@SuppressWarnings({"rawtypes"})
SimpleOrderedMap res = (SimpleOrderedMap)facetResult;
Boolean more = (Boolean)res.get("more");
if (more != null && more) {
@ -57,16 +58,18 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
private static class SortVal implements Comparable<SortVal> {
FacetBucket bucket;
FacetSortableMerger merger; // make this class inner and access merger , direction in parent?
FacetModule.FacetSortableMerger merger; // make this class inner and access merger , direction in parent?
FacetRequest.SortDirection direction;
@Override
@SuppressWarnings({"unchecked"})
public int compareTo(SortVal o) {
int c = -merger.compareTo(o.merger, direction) * direction.getMultiplier();
return c == 0 ? bucket.bucketValue.compareTo(o.bucket.bucketValue) : c;
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
public void mergeBucketList(List<SimpleOrderedMap> bucketList, Context mcontext) {
for (SimpleOrderedMap bucketRes : bucketList) {
Comparable bucketVal = (Comparable)bucketRes.get("val");
@ -80,6 +83,7 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
}
@SuppressWarnings({"unchecked", "rawtypes"})
public void sortBuckets(final FacetRequest.FacetSort sort) {
// NOTE: we *always* re-init from buckets, because it may have been modified post-refinement
sortedBuckets = new ArrayList<>( buckets.values() );
@ -140,7 +144,7 @@ abstract class FacetRequestSortedMerger<FacetRequestT extends FacetRequestSorted
if (merger != null) {
SortVal sv = new SortVal();
sv.bucket = bucket;
sv.merger = (FacetSortableMerger)merger;
sv.merger = (FacetModule.FacetSortableMerger)merger;
sv.direction = direction;
// sv.pos = i; // if we need position in the future...
lst.add(sv);

View File

@ -50,7 +50,7 @@ public class HLLAgg extends StrAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
SchemaField sf = fcontext.qcontext.searcher().getSchema().getField(getArg());
if (sf.multiValued() || sf.getType().multiValuedFieldCache()) {
if (sf.getType().isPointField()) {
@ -75,7 +75,7 @@ public class HLLAgg extends StrAggValueSource {
return new Merger();
}
private static class Merger extends FacetSortableMerger {
private static class Merger extends FacetModule.FacetSortableMerger {
HLL aggregate = null;
long answer = -1; // -1 means unset
@ -86,6 +86,9 @@ public class HLLAgg extends StrAggValueSource {
return;
}
@SuppressWarnings({"rawtypes"})
SimpleOrderedMap map = (SimpleOrderedMap)facetResult;
byte[] serialized = ((byte[])map.get("hll"));
HLL subHLL = HLL.fromBytes(serialized);
@ -109,7 +112,7 @@ public class HLLAgg extends StrAggValueSource {
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
return Long.compare( getLong(), ((Merger)other).getLong() );
}
}
@ -121,7 +124,7 @@ public class HLLAgg extends StrAggValueSource {
abstract class BaseNumericAcc extends DocValuesAcc {
HLL[] sets;
public BaseNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
public BaseNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
super(fcontext, fcontext.qcontext.searcher().getSchema().getField(field));
sets = new HLL[numSlots];
}
@ -160,6 +163,7 @@ public class HLLAgg extends StrAggValueSource {
return set == null ? 0 : set.cardinality();
}
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getShardValue(int slot) throws IOException {
HLL hll = sets[slot];
if (hll == null) return NO_VALUES;
@ -179,7 +183,7 @@ public class HLLAgg extends StrAggValueSource {
class NumericAcc extends BaseNumericAcc {
NumericDocValues values;
public NumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
public NumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
super(fcontext, field, numSlots);
}
@ -205,7 +209,7 @@ public class HLLAgg extends StrAggValueSource {
class SortedNumericAcc extends BaseNumericAcc {
SortedNumericDocValues values;
public SortedNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
public SortedNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
super(fcontext, field, numSlots);
}

View File

@ -270,6 +270,7 @@ public class LegacyFacet {
getCurrentSubs().put(key, sub);
}
@SuppressWarnings({"unchecked", "rawtypes"})
private Map<String,Object> getCurrentSubs() {
if (currentSubs == null) {
currentSubs = new LinkedHashMap();

View File

@ -46,7 +46,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
SchemaField sf = null;
@ -116,7 +116,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
}
// TODO: can this be replaced by ComparableMerger?
private class NumericMerger extends FacetDoubleMerger {
private class NumericMerger extends FacetModule.FacetDoubleMerger {
double val = Double.NaN;
@Override
@ -133,9 +133,11 @@ public class MinMaxAgg extends SimpleAggValueSource {
}
}
private class ComparableMerger extends FacetSortableMerger {
private class ComparableMerger extends FacetModule.FacetSortableMerger {
@SuppressWarnings("rawtypes")
Comparable val;
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void merge(Object facetResult, Context mcontext) {
Comparable other = (Comparable)facetResult;
if (val == null) {
@ -153,7 +155,8 @@ public class MinMaxAgg extends SimpleAggValueSource {
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
@SuppressWarnings({"unchecked"})
public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
// NOTE: we don't use the minmax multiplier here because we still want natural ordering between slots (i.e. min(field) asc and max(field) asc) both sort "A" before "Z")
return this.val.compareTo(((ComparableMerger)other).val);
}
@ -164,7 +167,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
private int currentSlot;
int[] result;
public MinMaxUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public MinMaxUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
result = new int[numSlots];
Arrays.fill(result, MISSING);
@ -233,8 +236,8 @@ public class MinMaxAgg extends SimpleAggValueSource {
}
}
class DFuncAcc extends DoubleFuncSlotAcc {
public DFuncAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
class DFuncAcc extends SlotAcc.DoubleFuncSlotAcc {
public DFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots, Double.NaN);
}
@ -260,9 +263,9 @@ public class MinMaxAgg extends SimpleAggValueSource {
}
}
class LFuncAcc extends LongFuncSlotAcc {
class LFuncAcc extends SlotAcc.LongFuncSlotAcc {
FixedBitSet exists;
public LFuncAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
public LFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots, 0);
exists = new FixedBitSet(numSlots);
}
@ -320,9 +323,9 @@ public class MinMaxAgg extends SimpleAggValueSource {
}
class DateFuncAcc extends LongFuncSlotAcc {
class DateFuncAcc extends SlotAcc.LongFuncSlotAcc {
private static final long MISSING = Long.MIN_VALUE;
public DateFuncAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
public DateFuncAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots, MISSING);
}
@ -351,7 +354,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
SchemaField field;
int[] slotOrd;
public OrdAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
public OrdAcc(FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
super(fcontext);
this.field = field;
slotOrd = new int[numSlots];
@ -394,7 +397,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
LongValues toGlobal;
SortedDocValues subDv;
public SingleValuedOrdAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
public SingleValuedOrdAcc(FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
super(fcontext, field, numSlots);
}
@ -450,7 +453,7 @@ public class MinMaxAgg extends SimpleAggValueSource {
SortedSetDocValues subDv;
long[] slotOrd;
public MinMaxSortedSetDVAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
public MinMaxSortedSetDVAcc(FacetContext fcontext, SchemaField field, int numSlots) throws IOException {
super(fcontext, field);
this.slotOrd = new long[numSlots];
Arrays.fill(slotOrd, MISSING);

View File

@ -37,7 +37,7 @@ public class MissingAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
@ -61,12 +61,12 @@ public class MissingAgg extends SimpleAggValueSource {
@Override
public FacetMerger createFacetMerger(Object prototype) {
return new FacetLongMerger();
return new FacetModule.FacetLongMerger();
}
class MissingSlotAcc extends LongFuncSlotAcc {
class MissingSlotAcc extends SlotAcc.LongFuncSlotAcc {
public MissingSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
public MissingSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots, 0);
}

View File

@ -49,7 +49,7 @@ public class PercentileAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
@ -132,12 +132,12 @@ public class PercentileAgg extends SimpleAggValueSource {
return lst;
}
class Acc extends FuncSlotAcc {
class Acc extends SlotAcc.FuncSlotAcc {
protected AVLTreeDigest[] digests;
protected ByteBuffer buf;
protected double[] sortvals;
public Acc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
public Acc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
digests = new AVLTreeDigest[numSlots];
}
@ -220,7 +220,7 @@ public class PercentileAgg extends SimpleAggValueSource {
protected ByteBuffer buf;
double[] sortvals;
public BasePercentileDVAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public BasePercentileDVAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf);
digests = new AVLTreeDigest[numSlots];
}
@ -289,7 +289,7 @@ public class PercentileAgg extends SimpleAggValueSource {
class PercentileSortedNumericAcc extends BasePercentileDVAcc {
SortedNumericDocValues values;
public PercentileSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public PercentileSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}
@ -339,7 +339,7 @@ public class PercentileAgg extends SimpleAggValueSource {
class PercentileSortedSetAcc extends BasePercentileDVAcc {
SortedSetDocValues values;
public PercentileSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public PercentileSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}
@ -376,7 +376,7 @@ public class PercentileAgg extends SimpleAggValueSource {
protected double[] sortvals;
private int currentSlot;
public PercentileUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public PercentileUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
digests = new AVLTreeDigest[numSlots];
}
@ -465,7 +465,7 @@ public class PercentileAgg extends SimpleAggValueSource {
}
}
class Merger extends FacetSortableMerger {
class Merger extends FacetModule.FacetSortableMerger {
protected AVLTreeDigest digest;
protected Double sortVal;
@ -488,7 +488,7 @@ public class PercentileAgg extends SimpleAggValueSource {
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
return Double.compare(getSortVal(), ((Merger) other).getSortVal());
}

View File

@ -117,12 +117,12 @@ public class RelatednessAgg extends AggValueSource {
}
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
public FunctionValues getValues(@SuppressWarnings("rawtypes") Map context, LeafReaderContext readerContext) throws IOException {
throw new UnsupportedOperationException("NOT IMPLEMENTED " + name + " " + this);
}
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
// TODO: Ideally this is where we should check fgQ/bgQ for 'null' and apply defaults...
//
// we want to walk up the fcontext and inherit the queries from any ancestor SKGAgg
@ -135,7 +135,7 @@ public class RelatednessAgg extends AggValueSource {
List<Query> fgFilters = new ArrayList<Query>(3);
fgFilters.add(fgQ);
for (FacetRequest.FacetContext ctx = fcontext; ctx != null; ctx = ctx.parent) {
for (FacetContext ctx = fcontext; ctx != null; ctx = ctx.parent) {
if (null != ctx.filter) {
fgFilters.add(ctx.filter);
} else {
@ -170,7 +170,7 @@ public class RelatednessAgg extends AggValueSource {
private final DocSet bgSet;
private final long fgSize;
private final long bgSize;
public SKGSlotAcc(final RelatednessAgg agg, final FacetRequest.FacetContext fcontext, final int numSlots,
public SKGSlotAcc(final RelatednessAgg agg, final FacetContext fcontext, final int numSlots,
final DocSet fgSet, final DocSet bgSet) throws IOException {
super(fcontext);
this.agg = agg;
@ -253,6 +253,7 @@ public class RelatednessAgg extends AggValueSource {
slotVal.incSizes(fgSize, bgSize);
}
@SuppressWarnings({"rawtypes"})
SimpleOrderedMap res = slotVal.externalize(fcontext.isShard());
return res;
}
@ -403,6 +404,8 @@ public class RelatednessAgg extends AggValueSource {
* @see SlotAcc#getValue
* @see Merger#getMergedResult
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public SimpleOrderedMap externalize(final boolean isShardRequest) {
SimpleOrderedMap result = new SimpleOrderedMap<Number>();
@ -429,7 +432,7 @@ public class RelatednessAgg extends AggValueSource {
/**
* Merges in the per shard {@link BucketData} output into a unified {@link BucketData}
*/
private static final class Merger extends FacetSortableMerger {
private static final class Merger extends FacetModule.FacetSortableMerger {
private final BucketData mergedData;
public Merger(final RelatednessAgg agg) {
this.mergedData = new BucketData(agg);
@ -437,13 +440,14 @@ public class RelatednessAgg extends AggValueSource {
@Override
public void merge(Object facetResult, Context mcontext) {
@SuppressWarnings({"unchecked"})
NamedList<Object> shardData = (NamedList<Object>)facetResult;
mergedData.incSizes((Long)shardData.remove(FG_SIZE), (Long)shardData.remove(BG_SIZE));
mergedData.incCounts((Long)shardData.remove(FG_COUNT), (Long)shardData.remove(BG_COUNT));
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
// NOTE: regardless of the SortDirection hint, we want normal comparison of the BucketData
assert other instanceof Merger;
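As the TODO comment above notes, createSlotAcc() walks up the fcontext chain so a nested relatedness() computation inherits the filters of every enclosing bucket. Below is a standalone sketch of that walk (not part of this commit), using a hypothetical two-field context in place of the real FacetContext:
import java.util.ArrayList;
import java.util.List;
/**
 * Toy version of the foreground-filter walk: start from the current context, follow
 * the parent links, and pick up every non-null filter. "Ctx" and its String filter are
 * hypothetical stand-ins for FacetContext and its Lucene Query.
 */
public class ToyContextWalk {
  static class Ctx {
    final Ctx parent;
    final String filter;   // stand-in for the Query held by the real FacetContext
    Ctx(Ctx parent, String filter) { this.parent = parent; this.filter = filter; }
  }
  static List<String> foregroundFilters(String fgQ, Ctx leaf) {
    List<String> fgFilters = new ArrayList<>();
    fgFilters.add(fgQ);
    for (Ctx ctx = leaf; ctx != null; ctx = ctx.parent) {  // mirrors the loop over ctx.parent
      if (ctx.filter != null) {
        fgFilters.add(ctx.filter);
      }
    }
    return fgFilters;
  }
  public static void main(String[] args) {
    Ctx root = new Ctx(null, null);                      // top level: no filter yet
    Ctx categories = new Ctx(root, "cat:books");         // enclosing facet bucket
    Ctx bucket = new Ctx(categories, "price:[0 TO 10]"); // nested bucket being evaluated
    System.out.println(foregroundFilters("inStock:true", bucket));
    // -> [inStock:true, price:[0 TO 10], cat:books]
  }
}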

View File

@ -35,6 +35,7 @@ public abstract class SimpleAggValueSource extends AggValueSource {
}
@Override
@SuppressWarnings({"rawtypes"})
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
throw new UnsupportedOperationException();
}

View File

@ -44,11 +44,11 @@ import org.apache.solr.search.SolrIndexSearcher;
*/
public abstract class SlotAcc implements Closeable {
String key; // todo...
protected final FacetRequest.FacetContext fcontext;
protected final FacetContext fcontext;
protected LeafReaderContext currentReaderContext;
protected int currentDocBase;
public SlotAcc(FacetRequest.FacetContext fcontext) {
public SlotAcc(FacetContext fcontext) {
this.fcontext = fcontext;
}
@ -68,28 +68,28 @@ public abstract class SlotAcc implements Closeable {
}
/**
* All subclasses must override this method to collect documents. This method is called by the
* default impl of {@link #collect(DocSet,int,IntFunction)} but it's also necessary if this accumulator
* All subclasses must override this method to collect documents. This method is called by the
* default impl of {@link #collect(DocSet, int, IntFunction)} but it's also necessary if this accumulator
* is used for sorting.
*
* @param doc Single Segment docId (relative to the current {@link LeafReaderContext} to collect
* @param slot The slot number to collect this document in
* @param slotContext A callback that can be used for Accumulators that would like additional info
* about the current slot -- the {@link IntFunction} is only guaranteed to be valid for
* the current slot, and the {@link SlotContext} returned is only valid for the duration
* of the <code>collect()</code> call.
* @param doc Single Segment docId (relative to the current {@link LeafReaderContext} to collect
* @param slot The slot number to collect this document in
* @param slotContext A callback that can be used for Accumulators that would like additional info
* about the current slot -- the {@link IntFunction} is only guaranteed to be valid for
* the current slot, and the {@link SlotContext} returned is only valid for the duration
* of the <code>collect()</code> call.
*/
public abstract void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException;
/**
* Bulk collection of all documents in a slot. The default implementation calls {@link #collect(int,int,IntFunction)}
* Bulk collection of all documents in a slot. The default implementation calls {@link #collect(int, int, IntFunction)}
*
* @param docs (global) Documents to collect
* @param slot The slot number to collect these documents in
* @param slotContext A callback that can be used for Accumulators that would like additional info
* about the current slot -- the {@link IntFunction} is only guaranteed to be valid for
* the current slot, and the {@link SlotContext} returned is only valid for the duration
* of the <code>collect()</code> call.
* @param docs (global) Documents to collect
* @param slot The slot number to collect these documents in
* @param slotContext A callback that can be used for Accumulators that would like additional info
* about the current slot -- the {@link IntFunction} is only guaranteed to be valid for
* the current slot, and the {@link SlotContext} returned is only valid for the duration
* of the <code>collect()</code> call.
*/
public int collect(DocSet docs, int slot, IntFunction<SlotContext> slotContext) throws IOException {
int count = 0;
@ -101,7 +101,7 @@ public abstract class SlotAcc implements Closeable {
int segBase = 0;
int segMax;
int adjustedMax = 0;
for (DocIterator docsIt = docs.iterator(); docsIt.hasNext();) {
for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
final int doc = docsIt.nextDoc();
if (doc >= adjustedMax) {
do {
@ -135,16 +135,24 @@ public abstract class SlotAcc implements Closeable {
}
}
/** Called to reset the acc to a fresh state, ready for reuse */
/**
* Called to reset the acc to a fresh state, ready for reuse
*/
public abstract void reset() throws IOException;
/** Typically called from setNextReader to reset docValue iterators */
protected void resetIterators() throws IOException {};
/**
* Typically called from setNextReader to reset docValue iterators
*/
protected void resetIterators() throws IOException {
}
public abstract void resize(Resizer resizer);
@Override
public void close() throws IOException {}
public void close() throws IOException {
}
public static abstract class Resizer {
public abstract int getNewSize();
@ -206,7 +214,7 @@ public abstract class SlotAcc implements Closeable {
FixedBitSet values = new FixedBitSet(getNewSize());
int oldSize = old.length();
for(int oldSlot = 0;;) {
for (int oldSlot = 0; ; ) {
oldSlot = values.nextSetBit(oldSlot);
if (oldSlot == DocIdSetIterator.NO_MORE_DOCS) break;
int newSlot = getNewSlot(oldSlot);
@ -218,6 +226,7 @@ public abstract class SlotAcc implements Closeable {
}
public <T> T[] resize(T[] old, T defaultValue) {
@SuppressWarnings({"unchecked"})
T[] values = (T[]) Array.newInstance(old.getClass().getComponentType(), getNewSize());
if (defaultValue != null) {
Arrays.fill(values, 0, values.length, defaultValue);
@ -237,37 +246,39 @@ public abstract class SlotAcc implements Closeable {
} // end class Resizer
/**
* Encapsulates information about the current slot, for Accumulators that may want
* Encapsulates information about the current slot, for Accumulators that may want
* additional info during collection.
*/
public static final class SlotContext {
private final Query slotQuery;
public SlotContext(Query slotQuery) {
this.slotQuery = slotQuery;
}
public Query getSlotQuery() {
return slotQuery;
}
}
}
// TODO: we should really have a decoupled value provider...
// TODO: we should really have a decoupled value provider...
// This would enhance reuse and also prevent multiple lookups of same value across diff stats
abstract class FuncSlotAcc extends SlotAcc {
protected final ValueSource valueSource;
protected FunctionValues values;
abstract static class FuncSlotAcc extends SlotAcc {
protected final ValueSource valueSource;
protected FunctionValues values;
public FuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
super(fcontext);
this.valueSource = values;
}
public FuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(fcontext);
this.valueSource = values;
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = valueSource.getValues(fcontext.qcontext, readerContext);
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
super.setNextReader(readerContext);
values = valueSource.getValues(fcontext.qcontext, readerContext);
}
}
}
// have a version that counts the number of times a Slot has been hit? (for avg... what else?)
@ -275,393 +286,395 @@ abstract class FuncSlotAcc extends SlotAcc {
// double-slot-func -> func-slot -> slot -> acc
// double-slot-func -> double-slot -> slot -> acc
abstract class DoubleFuncSlotAcc extends FuncSlotAcc {
double[] result; // TODO: use DoubleArray
double initialValue;
abstract static class DoubleFuncSlotAcc extends FuncSlotAcc {
double[] result; // TODO: use DoubleArray
double initialValue;
public DoubleFuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
this(values, fcontext, numSlots, 0);
}
public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
this(values, fcontext, numSlots, 0);
}
public DoubleFuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots, double initialValue) {
super(values, fcontext, numSlots);
this.initialValue = initialValue;
result = new double[numSlots];
if (initialValue != 0) {
reset();
public DoubleFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, double initialValue) {
super(values, fcontext, numSlots);
this.initialValue = initialValue;
result = new double[numSlots];
if (initialValue != 0) {
reset();
}
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slot) {
return result[slot];
}
@Override
public void reset() {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
abstract static class LongFuncSlotAcc extends FuncSlotAcc {
long[] result;
long initialValue;
@Override
public Object getValue(int slot) {
return result[slot];
}
public LongFuncSlotAcc(ValueSource values, FacetContext fcontext, int numSlots, long initialValue) {
super(values, fcontext, numSlots);
this.initialValue = initialValue;
result = new long[numSlots];
if (initialValue != 0) {
reset();
}
}
@Override
public void reset() {
Arrays.fill(result, initialValue);
}
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, initialValue);
}
}
@Override
public Object getValue(int slot) {
return result[slot];
}
abstract class LongFuncSlotAcc extends FuncSlotAcc {
long[] result;
long initialValue;
@Override
public void reset() {
Arrays.fill(result, initialValue);
}
public LongFuncSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots, long initialValue) {
super(values, fcontext, numSlots);
this.initialValue = initialValue;
result = new long[numSlots];
if (initialValue != 0) {
reset();
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
abstract class IntSlotAcc extends SlotAcc {
int[] result; // use LongArray32
int initialValue;
@Override
public Object getValue(int slot) {
return result[slot];
}
public IntSlotAcc(FacetContext fcontext, int numSlots, int initialValue) {
super(fcontext);
this.initialValue = initialValue;
result = new int[numSlots];
if (initialValue != 0) {
reset();
}
}
@Override
public void reset() {
Arrays.fill(result, initialValue);
}
@Override
public int compare(int slotA, int slotB) {
return Integer.compare(result[slotA], result[slotB]);
}
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, initialValue);
}
}
@Override
public Object getValue(int slot) {
return result[slot];
}
abstract class IntSlotAcc extends SlotAcc {
int[] result; // use LongArray32
int initialValue;
@Override
public void reset() {
Arrays.fill(result, initialValue);
}
public IntSlotAcc(FacetRequest.FacetContext fcontext, int numSlots, int initialValue) {
super(fcontext);
this.initialValue = initialValue;
result = new int[numSlots];
if (initialValue != 0) {
reset();
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, initialValue);
}
}
@Override
public int compare(int slotA, int slotB) {
return Integer.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slot) {
return result[slot];
}
@Override
public void reset() {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, initialValue);
}
}
class SumSlotAcc extends DoubleFuncSlotAcc {
public SumSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
}
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it?
result[slotNum] += val;
}
}
class SumsqSlotAcc extends DoubleFuncSlotAcc {
public SumsqSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
}
@Override
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
val = val * val;
result[slotNum] += val;
}
}
class AvgSlotAcc extends DoubleFuncSlotAcc {
int[] counts;
public AvgSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
counts = new int[numSlots];
}
@Override
public void reset() {
super.reset();
for (int i = 0; i < counts.length; i++) {
counts[i] = 0;
static class SumSlotAcc extends DoubleFuncSlotAcc {
public SumSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
}
}
@Override
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
if (val != 0 || values.exists(doc)) {
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc); // todo: worth trying to share this value across multiple stats that need it?
result[slotNum] += val;
counts[slotNum] += 1;
}
}
private double avg(int slot) {
return AggUtil.avg(result[slot], counts[slot]); // calc once and cache in result?
}
static class SumsqSlotAcc extends DoubleFuncSlotAcc {
public SumsqSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(avg(slotA), avg(slotB));
}
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(2);
lst.add(counts[slot]);
lst.add(result[slot]);
return lst;
} else {
return avg(slot);
@Override
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
val = val * val;
result[slotNum] += val;
}
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
counts = resizer.resize(counts, 0);
}
}
class VarianceSlotAcc extends DoubleFuncSlotAcc {
int[] counts;
double[] sum;
static class AvgSlotAcc extends DoubleFuncSlotAcc {
int[] counts;
public VarianceSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
counts = new int[numSlots];
sum = new double[numSlots];
}
public AvgSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
counts = new int[numSlots];
}
@Override
public void reset() {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
@Override
public void reset() {
super.reset();
for (int i = 0; i < counts.length; i++) {
counts[i] = 0;
}
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(this.counts, 0);
this.sum = resizer.resize(this.sum, 0);
}
@Override
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
if (val != 0 || values.exists(doc)) {
result[slotNum] += val;
counts[slotNum] += 1;
}
}
private double variance(int slot) {
return AggUtil.variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
}
private double avg(int slot) {
return AggUtil.avg(result[slot], counts[slot]); // calc once and cache in result?
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(this.variance(slotA), this.variance(slotB));
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(avg(slotA), avg(slotB));
}
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return this.variance(slot);
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList<Object> lst = new ArrayList<>(2);
lst.add(counts[slot]);
lst.add(result[slot]);
return lst;
} else {
return avg(slot);
}
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
counts = resizer.resize(counts, 0);
}
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
if (values.exists(doc)) {
counts[slot]++;
result[slot] += val * val;
sum[slot] += val;
static class VarianceSlotAcc extends DoubleFuncSlotAcc {
int[] counts;
double[] sum;
public VarianceSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
counts = new int[numSlots];
sum = new double[numSlots];
}
}
}
class StddevSlotAcc extends DoubleFuncSlotAcc {
int[] counts;
double[] sum;
@Override
public void reset() {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
public StddevSlotAcc(ValueSource values, FacetRequest.FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
counts = new int[numSlots];
sum = new double[numSlots];
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(this.counts, 0);
this.sum = resizer.resize(this.sum, 0);
}
@Override
public void reset() {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
private double variance(int slot) {
return AggUtil.variance(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(this.counts, 0);
this.result = resizer.resize(this.result, 0);
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(this.variance(slotA), this.variance(slotB));
}
private double stdDev(int slot) {
return AggUtil.stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
}
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList<Object> lst = new ArrayList<>(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return this.variance(slot);
}
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(this.stdDev(slotA), this.stdDev(slotB));
}
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return this.stdDev(slot);
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
if (values.exists(doc)) {
counts[slot]++;
result[slot] += val * val;
sum[slot] += val;
}
}
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
if (values.exists(doc)) {
counts[slot]++;
result[slot] += val * val;
sum[slot] += val;
static class StddevSlotAcc extends DoubleFuncSlotAcc {
int[] counts;
double[] sum;
public StddevSlotAcc(ValueSource values, FacetContext fcontext, int numSlots) {
super(values, fcontext, numSlots);
counts = new int[numSlots];
sum = new double[numSlots];
}
@Override
public void reset() {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(this.counts, 0);
this.result = resizer.resize(this.result, 0);
}
private double stdDev(int slot) {
return AggUtil.stdDev(result[slot], sum[slot], counts[slot]); // calc once and cache in result?
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(this.stdDev(slotA), this.stdDev(slotB));
}
@Override
@SuppressWarnings({"unchecked"})
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList<Object> lst = new ArrayList<>(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return this.stdDev(slot);
}
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
double val = values.doubleVal(doc);
if (values.exists(doc)) {
counts[slot]++;
result[slot] += val * val;
sum[slot] += val;
}
}
}
abstract static class CountSlotAcc extends SlotAcc {
public CountSlotAcc(FacetContext fcontext) {
super(fcontext);
}
public abstract void incrementCount(int slot, long count);
public abstract long getCount(int slot);
}
static class CountSlotArrAcc extends CountSlotAcc {
long[] result;
public CountSlotArrAcc(FacetContext fcontext, int numSlots) {
super(fcontext);
result = new long[numSlots];
}
@Override
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) {
// TODO: count arrays can use fewer bytes based on the number of docs in
// the base set (that's the upper bound for single valued) - look at ttf?
result[slotNum]++;
}
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void incrementCount(int slot, long count) {
result[slot] += count;
}
@Override
public long getCount(int slot) {
return result[slot];
}
// internal and expert
long[] getCountArray() {
return result;
}
@Override
public void reset() {
Arrays.fill(result, 0);
}
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, 0);
}
}
static class SortSlotAcc extends SlotAcc {
public SortSlotAcc(FacetContext fcontext) {
super(fcontext);
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
// no-op
}
@Override
public int compare(int slotA, int slotB) {
return slotA - slotB;
}
@Override
public Object getValue(int slotNum) {
return slotNum;
}
@Override
public void reset() {
// no-op
}
@Override
public void resize(Resizer resizer) {
// sort slot only works with direct-mapped accumulators
throw new UnsupportedOperationException();
}
}
}
abstract class CountSlotAcc extends SlotAcc {
public CountSlotAcc(FacetRequest.FacetContext fcontext) {
super(fcontext);
}
public abstract void incrementCount(int slot, long count);
public abstract long getCount(int slot);
}
class CountSlotArrAcc extends CountSlotAcc {
long[] result;
public CountSlotArrAcc(FacetRequest.FacetContext fcontext, int numSlots) {
super(fcontext);
result = new long[numSlots];
}
@Override
public void collect(int doc, int slotNum, IntFunction<SlotContext> slotContext) {
// TODO: count arrays can use fewer bytes based on the number of docs in
// the base set (that's the upper bound for single valued) - look at ttf?
result[slotNum]++;
}
@Override
public int compare(int slotA, int slotB) {
return Long.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void incrementCount(int slot, long count) {
result[slot] += count;
}
@Override
public long getCount(int slot) {
return result[slot];
}
// internal and expert
long[] getCountArray() {
return result;
}
@Override
public void reset() {
Arrays.fill(result, 0);
}
@Override
public void resize(Resizer resizer) {
result = resizer.resize(result, 0);
}
}
class SortSlotAcc extends SlotAcc {
public SortSlotAcc(FacetRequest.FacetContext fcontext) {
super(fcontext);
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
// no-op
}
@Override
public int compare(int slotA, int slotB) {
return slotA - slotB;
}
@Override
public Object getValue(int slotNum) {
return slotNum;
}
@Override
public void reset() {
// no-op
}
@Override
public void resize(Resizer resizer) {
// sort slot only works with direct-mapped accumulators
throw new UnsupportedOperationException();
}
}
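For readers following the SlotAcc refactor above, here is a minimal standalone sketch (not part of this commit, and deliberately avoiding the real FacetContext/FunctionValues/Resizer types) of the contract these accumulators share: one value per slot, a per-document collect() call, and a resize hook for when the slot count changes.
import java.util.Arrays;
import java.util.function.IntFunction;
/** Toy slot accumulator; mirrors the SlotAcc contract without any Solr dependencies. */
public class ToySumSlotAcc {
  private double[] result;                      // one running sum per slot
  private final IntFunction<Double> docValue;   // stand-in for FunctionValues.doubleVal(doc)
  public ToySumSlotAcc(int numSlots, IntFunction<Double> docValue) {
    this.result = new double[numSlots];
    this.docValue = docValue;
  }
  /** Called once per matching document; mirrors collect(int doc, int slot, slotContext). */
  public void collect(int doc, int slot) {
    result[slot] += docValue.apply(doc);
  }
  public double getValue(int slot) {
    return result[slot];
  }
  /** Mirrors Resizer.resize(old, defaultValue) when the slot count grows. */
  public void resize(int newSize) {
    result = Arrays.copyOf(result, newSize);
  }
  public static void main(String[] args) {
    // Five documents carrying the value doc * 1.5; even docs land in slot 0, odd docs in slot 1.
    ToySumSlotAcc acc = new ToySumSlotAcc(2, doc -> doc * 1.5);
    for (int doc = 0; doc < 5; doc++) {
      acc.collect(doc, doc % 2);
    }
    System.out.println(acc.getValue(0)); // 0.0 + 3.0 + 6.0 = 9.0
    System.out.println(acc.getValue(1)); // 1.5 + 4.5 = 6.0
  }
}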

View File

@ -32,7 +32,7 @@ public class StddevAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
@ -57,7 +57,7 @@ public class StddevAgg extends SimpleAggValueSource {
}
vs = sf.getType().getValueSource(sf, null);
}
return new StddevSlotAcc(vs, fcontext, numSlots);
return new SlotAcc.StddevSlotAcc(vs, fcontext, numSlots);
}
@Override
@ -65,7 +65,7 @@ public class StddevAgg extends SimpleAggValueSource {
return new Merger();
}
private static class Merger extends FacetDoubleMerger {
private static class Merger extends FacetModule.FacetDoubleMerger {
long count;
double sumSq;
double sum;
@ -90,9 +90,9 @@ public class StddevAgg extends SimpleAggValueSource {
}
}
class StddevSortedNumericAcc extends SDVSortedNumericAcc {
class StddevSortedNumericAcc extends DocValuesAcc.SDVSortedNumericAcc {
public StddevSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public StddevSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}
@ -102,9 +102,9 @@ public class StddevAgg extends SimpleAggValueSource {
}
}
class StddevSortedSetAcc extends SDVSortedSetAcc {
class StddevSortedSetAcc extends DocValuesAcc.SDVSortedSetAcc {
public StddevSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public StddevSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}
@ -114,9 +114,9 @@ public class StddevAgg extends SimpleAggValueSource {
}
}
class StddevUnInvertedFieldAcc extends SDVUnInvertedFieldAcc {
class StddevUnInvertedFieldAcc extends UnInvertedFieldAcc.SDVUnInvertedFieldAcc {
public StddevUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public StddevUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}
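The StddevSlotAcc shown earlier returns its raw moments (count, sum of squares, sum) when fcontext.isShard() is true, and the Merger above adds those moments together before deriving the final statistic. Here is a standalone sketch of that merge step (not the actual FacetDoubleMerger/AggUtil code), assuming the textbook population formula; the real AggUtil may apply corrections not shown here.
import java.util.Arrays;
import java.util.List;
/** Toy illustration of merging per-shard moments into one standard deviation. */
public class ToyStddevMerge {
  /** Each shard payload mirrors the list built in getValue(): {count, sum(x^2), sum(x)}. */
  static double mergedStddev(List<double[]> shardMoments) {
    double count = 0, sumSq = 0, sum = 0;
    for (double[] m : shardMoments) {  // mirrors Merger.merge(): accumulate, never average early
      count += m[0];
      sumSq += m[1];
      sum += m[2];
    }
    // Textbook population stddev from raw moments; assumption, not the exact AggUtil formula.
    double mean = sum / count;
    return Math.sqrt(sumSq / count - mean * mean);
  }
  public static void main(String[] args) {
    // Two shards that together hold the values {1, 2, 3, 4}.
    List<double[]> shards = Arrays.asList(
        new double[] {2, 1 * 1 + 2 * 2, 1 + 2},
        new double[] {2, 3 * 3 + 4 * 4, 3 + 4});
    System.out.println(mergedStddev(shards)); // ~1.118, the population stddev of 1..4
  }
}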

View File

@ -34,7 +34,7 @@ public class SumAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
@ -59,7 +59,7 @@ public class SumAgg extends SimpleAggValueSource {
}
vs = sf.getType().getValueSource(sf, null);
}
return new SumSlotAcc(vs, fcontext, numSlots);
return new SlotAcc.SumSlotAcc(vs, fcontext, numSlots);
}
@Override
@ -67,7 +67,7 @@ public class SumAgg extends SimpleAggValueSource {
return new Merger();
}
public static class Merger extends FacetDoubleMerger {
public static class Merger extends FacetModule.FacetDoubleMerger {
double val;
@Override
@ -80,9 +80,9 @@ public class SumAgg extends SimpleAggValueSource {
}
}
class SumSortedNumericAcc extends DoubleSortedNumericDVAcc {
class SumSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc {
public SumSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public SumSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}
@ -95,9 +95,9 @@ public class SumAgg extends SimpleAggValueSource {
}
class SumSortedSetAcc extends DoubleSortedSetDVAcc {
class SumSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc {
public SumSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public SumSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}
@ -113,9 +113,9 @@ public class SumAgg extends SimpleAggValueSource {
}
}
class SumUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
class SumUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc {
public SumUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public SumUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}

View File

@ -33,7 +33,7 @@ public class SumsqAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
@ -58,7 +58,7 @@ public class SumsqAgg extends SimpleAggValueSource {
}
vs = sf.getType().getValueSource(sf, null);
}
return new SumsqSlotAcc(vs, fcontext, numSlots);
return new SlotAcc.SumsqSlotAcc(vs, fcontext, numSlots);
}
@Override
@ -66,9 +66,9 @@ public class SumsqAgg extends SimpleAggValueSource {
return new SumAgg.Merger();
}
class SumSqSortedNumericAcc extends DoubleSortedNumericDVAcc {
class SumSqSortedNumericAcc extends DocValuesAcc.DoubleSortedNumericDVAcc {
public SumSqSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public SumSqSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}
@ -81,9 +81,9 @@ public class SumsqAgg extends SimpleAggValueSource {
}
}
class SumSqSortedSetAcc extends DoubleSortedSetDVAcc {
class SumSqSortedSetAcc extends DocValuesAcc.DoubleSortedSetDVAcc {
public SumSqSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public SumSqSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}
@ -99,9 +99,9 @@ public class SumsqAgg extends SimpleAggValueSource {
}
}
class SumSqUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
class SumSqUnInvertedFieldAcc extends UnInvertedFieldAcc.DoubleUnInvertedFieldAcc {
public SumSqUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public SumSqUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
}

View File

@ -315,7 +315,7 @@ public class UnInvertedField extends DocTermOrds {
private void getCounts(FacetFieldProcessorByArrayUIF processor, CountSlotAcc counts) throws IOException {
private void getCounts(FacetFieldProcessorByArrayUIF processor, SlotAcc.CountSlotAcc counts) throws IOException {
DocSet docs = processor.fcontext.base;
int baseSize = docs.size();
int maxDoc = searcher.maxDoc();
@ -427,7 +427,7 @@ public class UnInvertedField extends DocTermOrds {
DocSet docs = processor.fcontext.base;
int uniqueTerms = 0;
final CountSlotAcc countAcc = processor.countAcc;
final SlotAcc.CountSlotAcc countAcc = processor.countAcc;
for (TopTerm tt : bigTerms.values()) {
if (tt.termNum >= startTermIndex && tt.termNum < endTermIndex) {

View File

@ -36,7 +36,7 @@ public abstract class UnInvertedFieldAcc extends SlotAcc implements UnInvertedFi
UnInvertedField.DocToTerm docToTerm;
SchemaField sf;
public UnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public UnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext);
this.sf = sf;
uif = UnInvertedField.getUnInvertedField(sf.getName(), fcontext.qcontext.searcher());
@ -51,109 +51,111 @@ public abstract class UnInvertedFieldAcc extends SlotAcc implements UnInvertedFi
docToTerm = null;
}
}
}
abstract class DoubleUnInvertedFieldAcc extends UnInvertedFieldAcc {
double[] result;
int currentSlot;
double initialValue;
public DoubleUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
super(fcontext, sf, numSlots);
result = new double[numSlots];
if (initialValue != 0) {
this.initialValue = initialValue;
abstract static class DoubleUnInvertedFieldAcc extends UnInvertedFieldAcc {
double[] result;
int currentSlot;
double initialValue;
public DoubleUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots, double initialValue) throws IOException {
super(fcontext, sf, numSlots);
result = new double[numSlots];
if (initialValue != 0) {
this.initialValue = initialValue;
Arrays.fill(result, initialValue);
}
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
this.currentSlot = slot;
docToTerm.getBigTerms(doc + currentDocBase, this);
docToTerm.getSmallTerms(doc + currentDocBase, this);
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
}
@Override
public void collect(int doc, int slot, IntFunction<SlotContext> slotContext) throws IOException {
this.currentSlot = slot;
docToTerm.getBigTerms(doc + currentDocBase, this);
docToTerm.getSmallTerms(doc + currentDocBase, this);
}
@Override
public int compare(int slotA, int slotB) {
return Double.compare(result[slotA], result[slotB]);
}
@Override
public Object getValue(int slotNum) throws IOException {
return result[slotNum];
}
@Override
public void reset() throws IOException {
Arrays.fill(result, initialValue);
}
@Override
public void resize(Resizer resizer) {
@Override
public void resize(Resizer resizer) {
this.result = resizer.resize(result, initialValue);
}
}
/**
* Base accumulator to compute standard deviation and variance for uninvertible fields
*/
abstract class SDVUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
int[] counts;
double[] sum;
public SDVUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
this.sum = new double[numSlots];
}
@Override
public void call(int termNum) {
try {
BytesRef term = docToTerm.lookupOrd(termNum);
Object obj = sf.getType().toObject(sf, term);
double val = obj instanceof Date ? ((Date)obj).getTime(): ((Number)obj).doubleValue();
result[currentSlot] += val * val;
sum[currentSlot]+= val;
counts[currentSlot]++;
} catch (IOException e) {
// find a better way to do it
throw new UncheckedIOException(e);
}
}
protected abstract double computeVal(int slot);
/**
* Base accumulator to compute standard deviation and variance for uninvertible fields
*/
abstract static class SDVUnInvertedFieldAcc extends DoubleUnInvertedFieldAcc {
int[] counts;
double[] sum;
@Override
public int compare(int slotA, int slotB) {
return Double.compare(computeVal(slotA), computeVal(slotB));
}
@Override
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return computeVal(slot);
public SDVUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots, 0);
this.counts = new int[numSlots];
this.sum = new double[numSlots];
}
}
@Override
public void reset() throws IOException {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
@Override
public void call(int termNum) {
try {
BytesRef term = docToTerm.lookupOrd(termNum);
Object obj = sf.getType().toObject(sf, term);
double val = obj instanceof Date ? ((Date) obj).getTime() : ((Number) obj).doubleValue();
result[currentSlot] += val * val;
sum[currentSlot] += val;
counts[currentSlot]++;
} catch (IOException e) {
// find a better way to do it
throw new UncheckedIOException(e);
}
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
protected abstract double computeVal(int slot);
@Override
public int compare(int slotA, int slotB) {
return Double.compare(computeVal(slotA), computeVal(slotB));
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getValue(int slot) {
if (fcontext.isShard()) {
ArrayList lst = new ArrayList(3);
lst.add(counts[slot]);
lst.add(result[slot]);
lst.add(sum[slot]);
return lst;
} else {
return computeVal(slot);
}
}
@Override
public void reset() throws IOException {
super.reset();
Arrays.fill(counts, 0);
Arrays.fill(sum, 0);
}
@Override
public void resize(Resizer resizer) {
super.resize(resizer);
this.counts = resizer.resize(counts, 0);
this.sum = resizer.resize(sum, 0);
}
}
}
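The DoubleUnInvertedFieldAcc/SDVUnInvertedFieldAcc classes above use a callback style: collect() pins the current slot, asks DocToTerm to replay every term ordinal of the document into call(termNum), and call() resolves the ordinal to a numeric value (dates become epoch millis) before folding it into the slot. Below is a standalone sketch of that flow (not part of this commit), with hypothetical names in place of the UnInvertedField API:
/**
 * Toy uninverted-field accumulator: collect() remembers the slot, then every term
 * ordinal of the document is replayed into call(termNum). The ord-to-value array is a
 * hypothetical stand-in for lookupOrd() plus the field type conversion.
 */
public class ToyUnInvertedAcc {
  private final double[] sum;          // one running sum per slot
  private final double[] ordToValue;   // stand-in for lookupOrd(termNum) + FieldType.toObject()
  private final int[][] docToOrds;     // stand-in for DocToTerm: term ordinals per document
  private int currentSlot;
  ToyUnInvertedAcc(int numSlots, double[] ordToValue, int[][] docToOrds) {
    this.sum = new double[numSlots];
    this.ordToValue = ordToValue;
    this.docToOrds = docToOrds;
  }
  /** Mirrors UnInvertedFieldAcc.collect(): remember the slot, then replay the doc's terms. */
  void collect(int doc, int slot) {
    currentSlot = slot;
    for (int termNum : docToOrds[doc]) {
      call(termNum);
    }
  }
  /** Mirrors the per-term callback: resolve the ordinal and accumulate into the pinned slot. */
  void call(int termNum) {
    sum[currentSlot] += ordToValue[termNum];
  }
  double getValue(int slot) {
    return sum[slot];
  }
  public static void main(String[] args) {
    double[] ordToValue = {10.0, 20.0, 30.0};       // values behind term ordinals 0..2
    int[][] docToOrds = {{0, 2}, {1}, {0, 1, 2}};   // multi-valued field: ordinals per doc
    ToyUnInvertedAcc acc = new ToyUnInvertedAcc(2, ordToValue, docToOrds);
    acc.collect(0, 0);  // doc 0 -> slot 0: 10 + 30
    acc.collect(1, 1);  // doc 1 -> slot 1: 20
    acc.collect(2, 0);  // doc 2 -> slot 0: 10 + 20 + 30
    System.out.println(acc.getValue(0)); // 100.0
    System.out.println(acc.getValue(1)); // 20.0
  }
}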

View File

@ -42,7 +42,7 @@ public class UniqueAgg extends StrAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
SchemaField sf = fcontext.qcontext.searcher().getSchema().getField(getArg());
if (sf.multiValued() || sf.getType().multiValuedFieldCache()) {
if (sf.getType().isPointField()) {
@ -66,7 +66,7 @@ public class UniqueAgg extends StrAggValueSource {
return new Merger();
}
private static class Merger extends FacetSortableMerger {
private static class Merger extends FacetModule.FacetSortableMerger {
long answer = -1;
long sumUnique;
Set<Object> values;
@ -75,6 +75,7 @@ public class UniqueAgg extends StrAggValueSource {
long shardsMissingMax;
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public void merge(Object facetResult, Context mcontext) {
SimpleOrderedMap map = (SimpleOrderedMap)facetResult;
long unique = ((Number)map.get(UNIQUE)).longValue();
@ -117,7 +118,7 @@ public class UniqueAgg extends StrAggValueSource {
}
@Override
public int compareTo(FacetSortableMerger other, FacetRequest.SortDirection direction) {
public int compareTo(FacetModule.FacetSortableMerger other, FacetRequest.SortDirection direction) {
return Long.compare( getLong(), ((Merger)other).getLong() );
}
}
@ -126,7 +127,7 @@ public class UniqueAgg extends StrAggValueSource {
static abstract class BaseNumericAcc extends DocValuesAcc {
LongSet[] sets;
public BaseNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
public BaseNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
super(fcontext, fcontext.qcontext.searcher().getSchema().getField(field));
sets = new LongSet[numSlots];
}
@ -177,6 +178,7 @@ public class UniqueAgg extends StrAggValueSource {
return set == null ? 0 : set.cardinality();
}
@SuppressWarnings({"unchecked", "rawtypes"})
public Object getShardValue(int slot) throws IOException {
LongSet set = sets[slot];
int unique = getCardinality(slot);
@ -212,7 +214,7 @@ public class UniqueAgg extends StrAggValueSource {
static class NumericAcc extends BaseNumericAcc {
NumericDocValues values;
public NumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
public NumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
super(fcontext, field, numSlots);
}
@ -235,7 +237,7 @@ public class UniqueAgg extends StrAggValueSource {
static class SortedNumericAcc extends BaseNumericAcc {
SortedNumericDocValues values;
public SortedNumericAcc(FacetRequest.FacetContext fcontext, String field, int numSlots) throws IOException {
public SortedNumericAcc(FacetContext fcontext, String field, int numSlots) throws IOException {
super(fcontext, field, numSlots);
}

View File

@ -27,7 +27,7 @@ public abstract class UniqueBlockAgg extends UniqueAgg {
protected int[] lastSeenValuesPerSlot;
protected UniqueBlockSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots)
protected UniqueBlockSlotAcc(FacetContext fcontext, SchemaField field, int numSlots)
throws IOException { //
super(fcontext, field, /*numSlots suppressing inherited accumulator */0, null);
counts = new int[numSlots];
@ -79,10 +79,10 @@ public abstract class UniqueBlockAgg extends UniqueAgg {
}
@Override
public abstract SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException ;
public abstract SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException ;
@Override
public FacetMerger createFacetMerger(Object prototype) {
return new FacetLongMerger() ;
return new FacetModule.FacetLongMerger() ;
}
}

View File

@ -27,7 +27,7 @@ public class UniqueBlockFieldAgg extends UniqueBlockAgg {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
final String fieldName = getArg();
SchemaField sf = fcontext.qcontext.searcher().getSchema().getField(fieldName);
if (sf.multiValued() || sf.getType().multiValuedFieldCache()) {

View File

@ -34,7 +34,7 @@ public class UniqueBlockQueryAgg extends UniqueBlockAgg {
private Query query;
private BitSet parentBitSet;
private UniqueBlockQuerySlotAcc(FacetRequest.FacetContext fcontext, Query query, int numSlots)
private UniqueBlockQuerySlotAcc(FacetContext fcontext, Query query, int numSlots)
throws IOException { //
super(fcontext, null, numSlots);
this.query = query;
@ -65,7 +65,7 @@ public class UniqueBlockQueryAgg extends UniqueBlockAgg {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
return new UniqueBlockQuerySlotAcc(fcontext, query, numSlots);
}
}

View File

@ -36,7 +36,7 @@ class UniqueMultiDvSlotAcc extends UniqueSlotAcc {
LongValues toGlobal;
SortedSetDocValues subDv;
public UniqueMultiDvSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
public UniqueMultiDvSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
super(fcontext, field, numSlots, factory);
}

View File

@ -29,7 +29,7 @@ class UniqueMultivaluedSlotAcc extends UniqueSlotAcc implements UnInvertedField.
private UnInvertedField uif;
private UnInvertedField.DocToTerm docToTerm;
public UniqueMultivaluedSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
public UniqueMultivaluedSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
super(fcontext, field, numSlots, factory);
SolrIndexSearcher searcher = fcontext.qcontext.searcher();
uif = UnInvertedField.getUnInvertedField(field.getName(), searcher);

View File

@ -36,7 +36,7 @@ class UniqueSinglevaluedSlotAcc extends UniqueSlotAcc {
LongValues toGlobal;
SortedDocValues subDv;
public UniqueSinglevaluedSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
public UniqueSinglevaluedSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
super(fcontext, field, numSlots, factory);
}

View File

@ -35,7 +35,7 @@ abstract class UniqueSlotAcc extends SlotAcc {
int[] counts; // populated with the cardinality once
int nTerms;
public UniqueSlotAcc(FacetRequest.FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
public UniqueSlotAcc(FacetContext fcontext, SchemaField field, int numSlots, HLLAgg.HLLFactory factory) throws IOException {
super(fcontext);
this.factory = factory;
arr = new FixedBitSet[numSlots];
@ -74,6 +74,7 @@ abstract class UniqueSlotAcc extends SlotAcc {
return res;
}
@SuppressWarnings({"unchecked", "rawtypes"})
private Object getShardHLL(int slot) throws IOException {
FixedBitSet ords = arr[slot];
if (ords == null) return HLLAgg.NO_VALUES;
@ -97,6 +98,7 @@ abstract class UniqueSlotAcc extends SlotAcc {
return map;
}
@SuppressWarnings({"unchecked", "rawtypes"})
private Object getShardValue(int slot) throws IOException {
if (factory != null) return getShardHLL(slot);
FixedBitSet ords = arr[slot];

View File

@ -31,7 +31,7 @@ public class VarianceAgg extends SimpleAggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
ValueSource vs = getArg();
if (vs instanceof FieldNameValueSource) {
@ -56,7 +56,7 @@ public class VarianceAgg extends SimpleAggValueSource {
}
vs = sf.getType().getValueSource(sf, null);
}
return new VarianceSlotAcc(vs, fcontext, numSlots);
return new SlotAcc.VarianceSlotAcc(vs, fcontext, numSlots);
}
@Override
@ -64,7 +64,7 @@ public class VarianceAgg extends SimpleAggValueSource {
return new Merger();
}
private static class Merger extends FacetDoubleMerger {
private static class Merger extends FacetModule.FacetDoubleMerger {
long count;
double sumSq;
double sum;
@ -89,9 +89,9 @@ public class VarianceAgg extends SimpleAggValueSource {
}
}
class VarianceSortedNumericAcc extends SDVSortedNumericAcc {
class VarianceSortedNumericAcc extends DocValuesAcc.SDVSortedNumericAcc {
public VarianceSortedNumericAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public VarianceSortedNumericAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}
@ -101,9 +101,9 @@ public class VarianceAgg extends SimpleAggValueSource {
}
}
class VarianceSortedSetAcc extends SDVSortedSetAcc {
class VarianceSortedSetAcc extends DocValuesAcc.SDVSortedSetAcc {
public VarianceSortedSetAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public VarianceSortedSetAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}
@ -113,9 +113,9 @@ public class VarianceAgg extends SimpleAggValueSource {
}
}
class VarianceUnInvertedFieldAcc extends SDVUnInvertedFieldAcc {
class VarianceUnInvertedFieldAcc extends UnInvertedFieldAcc.SDVUnInvertedFieldAcc {
public VarianceUnInvertedFieldAcc(FacetRequest.FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
public VarianceUnInvertedFieldAcc(FacetContext fcontext, SchemaField sf, int numSlots) throws IOException {
super(fcontext, sf, numSlots);
}

View File

@ -258,10 +258,10 @@ public class TestSnapshotCloudManager extends SolrCloudTestCase {
if (!treeOne.equals(treeTwo)) {
List<String> t1 = new ArrayList<>(treeOne);
t1.removeAll(treeTwo);
log.warn("Only in tree one: " + t1);
log.warn("Only in tree one: {}", t1);
List<String> t2 = new ArrayList<>(treeTwo);
t2.removeAll(treeOne);
log.warn("Only in tree two: " + t2);
log.warn("Only in tree two: {}", t2);
}
assertEquals(treeOne, treeTwo);
for (String path : treeOne) {

View File

@ -211,7 +211,7 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
methodsExpectedNotOverriden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeIterator(org.apache.solr.common.IteratorWriter) throws java.io.IOException");
methodsExpectedNotOverriden.add("public default void org.apache.solr.common.util.JsonTextWriter.writeJsonIter(java.util.Iterator) throws java.io.IOException");
final Class<?> subClass = ArrayOfNameTypeValueJSONWriter.class;
final Class<?> subClass = JSONResponseWriter.ArrayOfNameTypeValueJSONWriter.class;
final Class<?> superClass = subClass.getSuperclass();
List<Method> allSuperClassMethods = new ArrayList<>();
@ -256,7 +256,7 @@ public class JSONWriterTest extends SolrTestCaseJ4 {
@Test
public void testArrntvWriterLacksMethodsOfItsOwn() {
final Class<?> subClass = ArrayOfNameTypeValueJSONWriter.class;
final Class<?> subClass = JSONResponseWriter.ArrayOfNameTypeValueJSONWriter.class;
final Class<?> superClass = subClass.getSuperclass();
// ArrayOfNamedValuePairJSONWriter is a simple sub-class
// which should have (almost) no methods of its own

View File

@ -68,7 +68,7 @@ class DebugAgg extends AggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) throws IOException {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) throws IOException {
return new Acc(fcontext, numDocs, numSlots, inner.createSlotAcc(fcontext, numDocs, numSlots));
}
@ -94,7 +94,7 @@ class DebugAgg extends AggValueSource {
public long numDocs;
public int numSlots;
public Acc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots, SlotAcc sub) {
public Acc(FacetContext fcontext, long numDocs, int numSlots, SlotAcc sub) {
super(fcontext);
this.last = this;
this.numDocs = numDocs;
@ -173,7 +173,7 @@ class DebugAgg extends AggValueSource {
}
@Override
public SlotAcc createSlotAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) {
public SlotAcc createSlotAcc(FacetContext fcontext, long numDocs, int numSlots) {
return new NumShardsAcc(fcontext, numDocs, numSlots);
}
@ -188,7 +188,7 @@ class DebugAgg extends AggValueSource {
}
public static class NumShardsAcc extends SlotAcc {
public NumShardsAcc(FacetRequest.FacetContext fcontext, long numDocs, int numSlots) {
public NumShardsAcc(FacetContext fcontext, long numDocs, int numSlots) {
super(fcontext);
}
@ -221,7 +221,7 @@ class DebugAgg extends AggValueSource {
@Override
public FacetMerger createFacetMerger(Object prototype) {
return new FacetLongMerger();
return new FacetModule.FacetLongMerger();
}
}

View File

@ -136,7 +136,7 @@ public class TestJsonFacetRefinement extends SolrTestCaseHS {
try {
int nShards = responsesAndTests.length / 2;
Object jsonFacet = Utils.fromJSONString(facet);
FacetRequest.FacetParser parser = new FacetRequest.FacetTopParser(req);
FacetParser parser = new FacetRequest.FacetTopParser(req);
FacetRequest facetRequest = parser.parse(jsonFacet);
FacetMerger merger = null;