SOLR-6216: Better faceting for multiple intervals on DV fields. Thanks Tomas

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1612889 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Erick Erickson 2014-07-23 17:43:23 +00:00
parent d3231b9ff9
commit 2d1056e06f
11 changed files with 2046 additions and 90 deletions

View File

@ -155,6 +155,9 @@ New Features
* SOLR-6263: Add DIH handler name to variable resolver as ${dih.handlerName}. (ehatcher)
* SOLR-6216: Better faceting for multiple intervals on DV fields (Tomas Fernandez-Lobbe
via Erick Erickson)
Bug Fixes
----------------------

View File

@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@ -326,92 +327,16 @@ public class FacetComponent extends SearchComponent
}
// Distributed facet_dates
//
// The implementation below uses the first encountered shard's
// facet_dates as the basis for subsequent shards' data to be merged.
// (the "NOW" param should ensure consistency)
@SuppressWarnings("unchecked")
SimpleOrderedMap<SimpleOrderedMap<Object>> facet_dates =
(SimpleOrderedMap<SimpleOrderedMap<Object>>)
facet_counts.get("facet_dates");
if (facet_dates != null) {
doDistribDates(fi, facet_counts);
// go through each facet_date
for (Map.Entry<String,SimpleOrderedMap<Object>> entry : facet_dates) {
final String field = entry.getKey();
if (fi.dateFacets.get(field) == null) {
// first time we've seen this field, no merging
fi.dateFacets.add(field, entry.getValue());
} else {
// not the first time, merge current field
SimpleOrderedMap<Object> shardFieldValues
= entry.getValue();
SimpleOrderedMap<Object> existFieldValues
= fi.dateFacets.get(field);
for (Map.Entry<String,Object> existPair : existFieldValues) {
final String key = existPair.getKey();
if (key.equals("gap") ||
key.equals("end") ||
key.equals("start")) {
// we can skip these, must all be the same across shards
continue;
}
// can be null if inconsistencies in shards responses
Integer newValue = (Integer) shardFieldValues.get(key);
if (null != newValue) {
Integer oldValue = ((Integer) existPair.getValue());
existPair.setValue(oldValue + newValue);
}
}
}
}
}
// Distributed facet_ranges
//
// The implementation below uses the first encountered shard's
// facet_ranges as the basis for subsequent shards' data to be merged.
@SuppressWarnings("unchecked")
SimpleOrderedMap<SimpleOrderedMap<Object>> facet_ranges =
(SimpleOrderedMap<SimpleOrderedMap<Object>>)
facet_counts.get("facet_ranges");
if (facet_ranges != null) {
doDistribRanges(fi, facet_counts);
// go through each facet_range
for (Map.Entry<String,SimpleOrderedMap<Object>> entry : facet_ranges) {
final String field = entry.getKey();
if (fi.rangeFacets.get(field) == null) {
// first time we've seen this field, no merging
fi.rangeFacets.add(field, entry.getValue());
} else {
// not the first time, merge current field counts
// Distributed facet_intervals
doDistribIntervals(fi, facet_counts);
@SuppressWarnings("unchecked")
NamedList<Integer> shardFieldValues
= (NamedList<Integer>) entry.getValue().get("counts");
@SuppressWarnings("unchecked")
NamedList<Integer> existFieldValues
= (NamedList<Integer>) fi.rangeFacets.get(field).get("counts");
for (Map.Entry<String,Integer> existPair : existFieldValues) {
final String key = existPair.getKey();
// can be null if inconsistencies in shards responses
Integer newValue = shardFieldValues.get(key);
if (null != newValue) {
Integer oldValue = existPair.getValue();
existPair.setValue(oldValue + newValue);
}
}
}
}
}
}
//
@ -480,6 +405,145 @@ public class FacetComponent extends SearchComponent
}
}
//
// The implementation below uses the first encountered shard's
// facet_intervals as the basis for subsequent shards' data to be merged.
/**
 * Merges the {@code facet_intervals} section of one shard's response into the
 * accumulated distributed state ({@code fi.intervalFacets}).
 * <p>
 * The first shard to report a field is taken as-is; every later shard must
 * return exactly the same intervals, in the same order (each shard returns all
 * intervals, even zero-count ones), so merging is a positional walk over two
 * iterators that sums the counts pairwise. Any mismatch in keys or length is
 * treated as a server error.
 *
 * @param fi           accumulated facet state for this distributed request
 * @param facet_counts the "facet_counts" section of a single shard response
 */
private void doDistribIntervals(FacetInfo fi, NamedList facet_counts) {
  @SuppressWarnings("unchecked")
  SimpleOrderedMap<SimpleOrderedMap<Integer>> facet_intervals =
      (SimpleOrderedMap<SimpleOrderedMap<Integer>>)
          facet_counts.get("facet_intervals");
  if (facet_intervals != null) {
    for (Map.Entry<String, SimpleOrderedMap<Integer>> entry : facet_intervals) {
      final String field = entry.getKey();
      SimpleOrderedMap<Integer> existingCounts = fi.intervalFacets.get(field);
      if (existingCounts == null) {
        // first time we've seen this field, no merging
        fi.intervalFacets.add(field, entry.getValue());
      } else {
        // not the first time, merge current field counts
        Iterator<Map.Entry<String, Integer>> newItr = entry.getValue().iterator();
        Iterator<Map.Entry<String, Integer>> exItr = existingCounts.iterator();
        // all intervals should be returned by each shard, even if they have zero count,
        // and in the same order
        while (exItr.hasNext()) {
          Map.Entry<String, Integer> exItem = exItr.next();
          if (!newItr.hasNext()) {
            // this shard returned fewer intervals than a previous shard
            throw new SolrException(ErrorCode.SERVER_ERROR,
                "Interval facet shard response missing key: " + exItem.getKey());
          }
          Map.Entry<String, Integer> newItem = newItr.next();
          if (!newItem.getKey().equals(exItem.getKey())) {
            // keys must line up positionally across shards
            throw new SolrException(ErrorCode.SERVER_ERROR,
                "Interval facet shard response has extra key: " + newItem.getKey());
          }
          exItem.setValue(exItem.getValue() + newItem.getValue());
        }
        if (newItr.hasNext()) {
          // this shard returned more intervals than a previous shard
          throw new SolrException(ErrorCode.SERVER_ERROR,
              "Interval facet shard response has at least one extra key: "
                  + newItr.next().getKey());
        }
      }
    }
  }
}
//
// The implementation below uses the first encountered shard's
// facet_ranges as the basis for subsequent shards' data to be merged.
/**
 * Merges the {@code facet_ranges} section of one shard's response into the
 * accumulated distributed state ({@code fi.rangeFacets}). The first shard's
 * response for a field is adopted as the basis; later shards only contribute
 * their per-bucket "counts", which are summed into the existing buckets.
 *
 * @param fi           accumulated facet state for this distributed request
 * @param facet_counts the "facet_counts" section of a single shard response
 */
private void doDistribRanges(FacetInfo fi, NamedList facet_counts) {
  @SuppressWarnings("unchecked")
  SimpleOrderedMap<SimpleOrderedMap<Object>> facet_ranges =
      (SimpleOrderedMap<SimpleOrderedMap<Object>>) facet_counts.get("facet_ranges");
  if (facet_ranges == null) {
    return;
  }
  // visit each field for which the shard returned range facets
  for (Map.Entry<String,SimpleOrderedMap<Object>> rangeEntry : facet_ranges) {
    final String fieldName = rangeEntry.getKey();
    SimpleOrderedMap<Object> merged = fi.rangeFacets.get(fieldName);
    if (merged == null) {
      // first shard to report this field: adopt its response as-is
      fi.rangeFacets.add(fieldName, rangeEntry.getValue());
      continue;
    }
    // subsequent shard: fold its per-bucket counts into the accumulated ones
    @SuppressWarnings("unchecked")
    NamedList<Integer> shardCounts
        = (NamedList<Integer>) rangeEntry.getValue().get("counts");
    @SuppressWarnings("unchecked")
    NamedList<Integer> mergedCounts
        = (NamedList<Integer>) merged.get("counts");
    for (Map.Entry<String,Integer> bucket : mergedCounts) {
      // can be null if there are inconsistencies in shard responses
      Integer shardCount = shardCounts.get(bucket.getKey());
      if (shardCount != null) {
        bucket.setValue(bucket.getValue() + shardCount);
      }
    }
  }
}
//
// The implementation below uses the first encountered shard's
// facet_dates as the basis for subsequent shards' data to be merged.
// (the "NOW" param should ensure consistency)
/**
 * Merges the {@code facet_dates} section of one shard's response into the
 * accumulated distributed state ({@code fi.dateFacets}). The first shard's
 * response for a field is adopted as the basis; for later shards the integer
 * bucket counts are summed, while the "gap"/"end"/"start" metadata entries are
 * skipped (the "NOW" param keeps them identical across shards).
 *
 * @param fi           accumulated facet state for this distributed request
 * @param facet_counts the "facet_counts" section of a single shard response
 */
private void doDistribDates(FacetInfo fi, NamedList facet_counts) {
  @SuppressWarnings("unchecked")
  SimpleOrderedMap<SimpleOrderedMap<Object>> facet_dates =
      (SimpleOrderedMap<SimpleOrderedMap<Object>>) facet_counts.get("facet_dates");
  if (facet_dates == null) {
    return;
  }
  // visit each field for which the shard returned date facets
  for (Map.Entry<String,SimpleOrderedMap<Object>> dateEntry : facet_dates) {
    final String fieldName = dateEntry.getKey();
    SimpleOrderedMap<Object> merged = fi.dateFacets.get(fieldName);
    if (merged == null) {
      // first shard to report this field: adopt its response as-is
      fi.dateFacets.add(fieldName, dateEntry.getValue());
      continue;
    }
    // subsequent shard: sum counts bucket-by-bucket
    SimpleOrderedMap<Object> shardValues = dateEntry.getValue();
    for (Map.Entry<String,Object> mergedPair : merged) {
      final String name = mergedPair.getKey();
      if (name.equals("gap") || name.equals("end") || name.equals("start")) {
        // metadata entries, identical across shards — nothing to merge
        continue;
      }
      // can be null if there are inconsistencies in shard responses
      Integer shardCount = (Integer) shardValues.get(name);
      if (shardCount != null) {
        Integer soFar = (Integer) mergedPair.getValue();
        mergedPair.setValue(soFar + shardCount);
      }
    }
  }
}
private void refineFacets(ResponseBuilder rb, ShardRequest sreq) {
FacetInfo fi = rb._facetInfo;
@ -589,6 +653,7 @@ public class FacetComponent extends SearchComponent
facet_counts.add("facet_dates", fi.dateFacets);
facet_counts.add("facet_ranges", fi.rangeFacets);
facet_counts.add("facet_intervals", fi.intervalFacets);
rb.rsp.add("facet_counts", facet_counts);
@ -637,6 +702,8 @@ public class FacetComponent extends SearchComponent
= new SimpleOrderedMap<>();
public SimpleOrderedMap<SimpleOrderedMap<Object>> rangeFacets
= new SimpleOrderedMap<>();
public SimpleOrderedMap<SimpleOrderedMap<Integer>> intervalFacets
= new SimpleOrderedMap<>();
void parse(SolrParams params, ResponseBuilder rb) {
queryFacets = new LinkedHashMap<>();

View File

@ -0,0 +1,750 @@
package org.apache.solr.request;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.request.IntervalFacets.FacetInterval;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.TrieDateField;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Computes interval facets for docvalues field (single or multivalued).
* <p/>
* Given a set of intervals for a field and a DocSet, it calculates the number
* of documents that match each of the intervals provided. The final count for
* each interval should be exactly the same as the number of results of a range
* query using the DocSet and the range as filters. This means that the count
* of {@code facet.query=field:[A TO B]} should be the same as the count of
* {@code f.field.facet.interval.set=[A,B]}, however, this method will usually
* be faster in cases where there are a larger number of intervals per field.
* <p/>
* To use this class, create an instance using
* {@link #IntervalFacets(SchemaField, SolrIndexSearcher, DocSet, String[])}
* and then iterate the {@link FacetInterval} using {@link #iterator()}
* <p/>
* Intervals Format<br>
* Intervals must begin with either '(' or '[', be followed by the start value,
* then a comma ',', the end value, and finally ')' or ']'. For example:
* <ul>
* <li> (1,10) -&gt; will include values greater than 1 and lower than 10
* <li> [1,10) -&gt; will include values greater or equal to 1 and lower than 10
* <li> [1,10] -&gt; will include values greater or equal to 1 and lower or equal to 10
* </ul>
* The initial and end values can't be empty, if the interval needs to be unbounded,
* the special character '*' can be used for both, start and end limit. When using
* '*', '(' and '[', and ')' and ']' will be treated equal. [*,*] will include all
* documents with a value in the field<p>
* The interval limits may be strings, there is no need to add quotes, all the text
* until the comma will be treated as the start limit, and the text after that will be
* the end limit, for example: [Buenos Aires,New York]. Keep in mind that a string-like
* comparison will be done to match documents in string intervals (case-sensitive). The
* comparator can't be changed.
* Commas, brackets and square brackets can be escaped by using '\' in front of them.
* Whitespace before and after the values will be omitted. The start limit can't be greater
* than the end limit. Equal limits are allowed.
* <p/>
* To use this class:
* <pre>
* IntervalFacets intervalFacets = new IntervalFacets(schemaField, searcher, docs, intervalStrs);
* for (FacetInterval interval : intervalFacets) {
* results.add(interval.getKey(), interval.getCount());
* }
* </pre>
*/
public class IntervalFacets implements Iterable<FacetInterval> {
// field the intervals are computed over
private final SchemaField schemaField;
// searcher used to walk index segments
private final SolrIndexSearcher searcher;
// base document set; counts are restricted to these docs
private final DocSet docs;
// parsed intervals, sorted by ascending start value (see getSortedIntervals)
private final FacetInterval[] intervals;
/**
 * Parses {@code intervals}, sorts them by start value, and immediately counts
 * the matching documents in {@code docs} (counting happens in this constructor;
 * afterwards results are read via {@link #iterator()}).
 *
 * @throws SyntaxError if any interval string is malformed
 * @throws IOException on index access error
 */
public IntervalFacets(SchemaField schemaField, SolrIndexSearcher searcher, DocSet docs, String[] intervals) throws SyntaxError, IOException {
  this.schemaField = schemaField;
  this.searcher = searcher;
  this.docs = docs;
  this.intervals = getSortedIntervals(intervals);
  doCount();
}
/**
 * Parses every interval string into a {@link FacetInterval} and returns the
 * array sorted by ascending start value. An unbounded start ('*', stored as
 * {@code null}) sorts before any bounded start. The sort is what lets the
 * accumulation loops stop early on {@code LOWER_THAN_START}.
 *
 * @throws SyntaxError if any interval string is malformed
 */
private FacetInterval[] getSortedIntervals(String[] intervals) throws SyntaxError {
  FacetInterval[] parsed = new FacetInterval[intervals.length];
  for (int i = 0; i < intervals.length; i++) {
    parsed[i] = new FacetInterval(schemaField, intervals[i]);
  }
  // order by start value, lowest first; null (unbounded) start compares lowest
  Arrays.sort(parsed, new Comparator<FacetInterval>() {
    @Override
    public int compare(FacetInterval a, FacetInterval b) {
      assert a != null;
      assert b != null;
      if (a.start == null) {
        return (b.start == null) ? 0 : -1;
      }
      if (b.start == null) {
        return 1;
      }
      return a.start.compareTo(b.start);
    }
  });
  return parsed;
}
/**
 * Dispatches counting to the appropriate strategy: single-valued numeric
 * fields are counted straight from {@code NumericDocValues}; multivalued
 * and/or non-numeric fields go through the ordinal-based (sorted doc values)
 * path.
 */
private void doCount() throws IOException {
  final boolean singleValuedNumeric =
      schemaField.getType().getNumericType() != null && !schemaField.multiValued();
  if (singleValuedNumeric) {
    getCountNumeric();
  } else {
    getCountString();
  }
}
/**
 * Counts interval matches for a single-valued numeric field by iterating the
 * DocSet in docid order and reading each document's value from
 * {@code NumericDocValues}. Float/double values are remapped to their sortable
 * long form so they compare consistently with the limits computed in
 * {@code FacetInterval#setNumericLimits}.
 */
private void getCountNumeric() throws IOException {
  final FieldType ft = schemaField.getType();
  final String fieldName = schemaField.getName();
  final NumericType numericType = ft.getNumericType();
  if (numericType == null) {
    // doCount() only routes numeric fields here; anything else is a programming error
    throw new IllegalStateException();
  }
  final List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();
  final Iterator<AtomicReaderContext> ctxIt = leaves.iterator();
  AtomicReaderContext ctx = null;
  NumericDocValues longs = null;
  Bits docsWithField = null;
  for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
    final int doc = docsIt.nextDoc();
    // docids come back in increasing order, so we only ever advance forward
    // through the segment list; re-resolve doc values when crossing a segment
    if (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc()) {
      do {
        ctx = ctxIt.next();
      } while (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc());
      assert doc >= ctx.docBase;
      switch (numericType) {
        case LONG:
          longs = DocValues.getNumeric(ctx.reader(), fieldName);
          break;
        case INT:
          // ints are stored widened to long; no remapping needed
          longs = DocValues.getNumeric(ctx.reader(), fieldName);
          break;
        case FLOAT:
          final NumericDocValues floats = DocValues.getNumeric(ctx.reader(), fieldName);
          // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
          longs = new NumericDocValues() {
            @Override
            public long get(int docID) {
              // flip negative-value bits so raw float bits sort numerically
              long bits = floats.get(docID);
              if (bits < 0) bits ^= 0x7fffffffffffffffL;
              return bits;
            }
          };
          break;
        case DOUBLE:
          final NumericDocValues doubles = DocValues.getNumeric(ctx.reader(), fieldName);
          // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
          longs = new NumericDocValues() {
            @Override
            public long get(int docID) {
              // flip negative-value bits so raw double bits sort numerically
              long bits = doubles.get(docID);
              if (bits < 0) bits ^= 0x7fffffffffffffffL;
              return bits;
            }
          };
          break;
        default:
          throw new AssertionError();
      }
      docsWithField = DocValues.getDocsWithField(ctx.reader(), schemaField.getName());
    }
    long v = longs.get(doc - ctx.docBase);
    // v == 0 is ambiguous (missing values also read as 0), so consult
    // docsWithField to distinguish a real zero from an absent value
    if (v != 0 || docsWithField.get(doc - ctx.docBase)) {
      accumIntervalWithValue(v);
    }
  }
}
/**
 * Counts interval matches for non-numeric and/or multivalued fields by walking
 * each segment's sorted doc values. The DocSet is applied as a top-level
 * filter; per segment, the field's ordinals are accumulated either through the
 * single-valued or the multivalued path.
 */
private void getCountString() throws IOException {
  Filter filter = docs.getTopFilter();
  List<AtomicReaderContext> leaves = searcher.getTopReaderContext().leaves();
  for (int subIndex = 0; subIndex < leaves.size(); subIndex++) {
    AtomicReaderContext leaf = leaves.get(subIndex);
    DocIdSet dis = filter.getDocIdSet(leaf, null); // solr docsets already exclude any deleted docs
    if (dis == null) {
      // no matching docs in this segment
      continue;
    }
    DocIdSetIterator disi = dis.iterator();
    if (disi != null) {
      if (schemaField.multiValued()) {
        SortedSetDocValues sub = leaf.reader().getSortedSetDocValues(schemaField.getName());
        if (sub == null) {
          // no doc values for this field in this segment
          continue;
        }
        final SortedDocValues singleton = DocValues.unwrapSingleton(sub);
        if (singleton != null) {
          // some codecs may optimize SORTED_SET storage for single-valued fields
          accumIntervalsSingle(singleton, disi, dis.bits());
        } else {
          accumIntervalsMulti(sub, disi, dis.bits());
        }
      } else {
        SortedDocValues sub = leaf.reader().getSortedDocValues(schemaField.getName());
        if (sub == null) {
          // no doc values for this field in this segment
          continue;
        }
        accumIntervalsSingle(sub, disi, dis.bits());
      }
    }
  }
}
/**
 * Accumulates counts for a multivalued field in one segment. For each matching
 * document, its ordinals are visited in increasing order while walking the
 * (start-sorted) intervals with a single forward-moving cursor, so each
 * document/interval pair is examined at most once.
 *
 * @param ssdv sorted-set doc values for the current segment
 * @param disi matching docids in this segment
 * @param bits acceptable-docs filter; may be null (accept all)
 */
private void accumIntervalsMulti(SortedSetDocValues ssdv,
    DocIdSetIterator disi, Bits bits) throws IOException {
  // First update the ordinals in the intervals for this segment
  for (FacetInterval interval : intervals) {
    interval.updateContext(ssdv);
  }
  int doc;
  while ((doc = disi.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
    if (bits != null && bits.get(doc) == false) {
      continue;
    }
    ssdv.setDocument(doc);
    long currOrd;
    int currentInterval = 0;
    while ((currOrd = ssdv.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
      boolean evaluateNextInterval = true;
      while (evaluateNextInterval && currentInterval < intervals.length) {
        IntervalCompareResult result = intervals[currentInterval].includes(currOrd);
        switch (result) {
          case INCLUDED:
            /*
             * Increment the current interval and move to the next one using
             * the same value
             */
            intervals[currentInterval].incCount();
            currentInterval++;
            break;
          case LOWER_THAN_START:
            /*
             * None of the next intervals will match this value (all of them have
             * higher start value). Move to the next value for this document.
             */
            evaluateNextInterval = false;
            break;
          case GREATER_THAN_END:
            /*
             * Next interval may match this value
             */
            currentInterval++;
            break;
        }
      }
    }
  }
}
/**
 * Accumulates counts for a single-valued (sorted doc values) field in one
 * segment: refreshes each interval's per-segment ordinal limits, then counts
 * the ordinal of every matching document that has a value.
 *
 * @param sdv  sorted doc values for the current segment
 * @param disi matching docids in this segment
 * @param bits acceptable-docs filter; may be null (accept all)
 */
private void accumIntervalsSingle(SortedDocValues sdv, DocIdSetIterator disi, Bits bits) throws IOException {
  // per-segment ordinal limits must be recomputed before any includes() call
  for (FacetInterval interval : intervals) {
    interval.updateContext(sdv);
  }
  for (int doc = disi.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = disi.nextDoc()) {
    if (bits != null && !bits.get(doc)) {
      continue;
    }
    final int ord = sdv.getOrd(doc);
    // a negative ordinal means the document has no value for this field
    if (ord >= 0) {
      accumInterval(ord);
    }
  }
}
/**
 * Counts a single (non-negative) term ordinal against the intervals by
 * delegating to the shared long-based accumulation path.
 */
private void accumInterval(int ord) {
  assert ord >= 0;
  accumIntervalWithValue(ord);
}
/**
 * Counts one value (a sortable long for numeric fields, a term ordinal
 * otherwise) against every interval it falls into. Because intervals are
 * sorted by start value, the scan stops as soon as the value is below an
 * interval's start — no later interval can match either.
 */
private void accumIntervalWithValue(long value) {
  for (FacetInterval interval : intervals) {
    final IntervalCompareResult result = interval.includes(value);
    if (result == IntervalCompareResult.INCLUDED) {
      interval.incCount();
    } else if (result == IntervalCompareResult.LOWER_THAN_START) {
      // all remaining intervals have an equal or greater start value; skip them
      break;
    }
  }
}
/** Result of comparing a document value (or ordinal) against an interval. */
static enum IntervalCompareResult {
  LOWER_THAN_START,  // value is below the interval's start limit
  INCLUDED,          // value falls inside the interval
  GREATER_THAN_END,  // value is above the interval's end limit
}
/**
* Helper class to match and count documents in the specified intervals
*/
static class FacetInterval {
  /**
   * Key (the raw interval string) that identifies this interval in the response
   */
  private final String key;
  /**
   * Start value for this interval as indicated in the request; null means unbounded ('*')
   */
  final BytesRef start;
  /**
   * End value for this interval as indicated in the request; null means unbounded ('*')
   */
  final BytesRef end;
  /**
   * Whether this interval excludes the lower limit ('(' instead of '[')
   */
  private final boolean startOpen;
  /**
   * Whether this interval excludes the upper limit (')' instead of ']')
   */
  private final boolean endOpen;
  /**
   * Lower limit to which compare a document value. If the field in which we
   * are faceting is single value numeric, then this number will be the
   * {@code long} representation of {@link #start}, and in this case
   * the limit doesn't need to be updated once it is set (will be set in the
   * constructor and remain equal for the life of this object). If the field
   * is multivalued and/or non-numeric, then this number will be the lower limit
   * ordinal for a value to be included in this interval. In this case,
   * {@link #startLimit} needs to be set using either {@link #updateContext(SortedDocValues)} or
   * {@link #updateContext(SortedSetDocValues)} (depending on the field type) for
   * every segment before calling {@link #includes(long)} for any document in the
   * segment.
   */
  private long startLimit;
  /**
   * Upper limit to which compare a document value. If the field in which we
   * are faceting is single value numeric, then this number will be the
   * {@code long} representation of {@link #end}, and in this case
   * the limit doesn't need to be updated once it is set (will be set in the
   * constructor and remain equal for the life of this object). If the field
   * is multivalued and/or non-numeric, then this number will be the upper limit
   * ordinal for a value to be included in this interval. In this case,
   * {@link #endLimit} needs to be set using either {@link #updateContext(SortedDocValues)} or
   * {@link #updateContext(SortedSetDocValues)} (depending on the field type) for
   * every segment before calling {@link #includes(long)} for any document in the
   * segment.
   */
  private long endLimit;
  /**
   * The current count of documents that match this interval
   */
  private int count;
/**
 * Parses one interval specification, e.g. {@code [5,10)} or {@code (*,100]}.
 * The surrounding brackets determine whether each end is inclusive
 * ('[' / ']') or exclusive ('(' / ')'); '*' on either side makes that end
 * unbounded (stored as a null limit). For numeric fields the long-based
 * comparison limits are precomputed here; for other fields the per-segment
 * ordinal limits are resolved later via {@code updateContext}.
 *
 * @param schemaField the field being faceted on
 * @param intervalStr the raw interval string from the request (trimmed here)
 * @throws SyntaxError if the interval string is malformed, a limit cannot be
 *                     converted to the field's internal form, or start &gt; end
 */
FacetInterval(SchemaField schemaField, String intervalStr) throws SyntaxError {
  if (intervalStr == null) throw new SyntaxError("empty facet interval");
  intervalStr = intervalStr.trim();
  if (intervalStr.length() == 0) throw new SyntaxError("empty facet interval");
  key = intervalStr;
  if (intervalStr.charAt(0) == '(') {
    startOpen = true;
  } else if (intervalStr.charAt(0) == '[') {
    startOpen = false;
  } else {
    throw new SyntaxError("Invalid start character " + intervalStr.charAt(0) + " in facet interval " + intervalStr);
  }
  final int lastNdx = intervalStr.length() - 1;
  if (intervalStr.charAt(lastNdx) == ')') {
    endOpen = true;
  } else if (intervalStr.charAt(lastNdx) == ']') {
    endOpen = false;
  } else {
    // BUGFIX: report the actual last character; this previously printed
    // charAt(0) (the start character) in the error message
    throw new SyntaxError("Invalid end character " + intervalStr.charAt(lastNdx) + " in facet interval " + intervalStr);
  }
  // parse everything between the brackets: start limit, unescaped comma, end limit
  StringBuilder startStr = new StringBuilder(lastNdx);
  int i = unescape(intervalStr, 1, lastNdx, startStr);
  if (i == lastNdx) {
    if (intervalStr.charAt(lastNdx - 1) == ',') {
      throw new SyntaxError("Empty interval limit");
    }
    throw new SyntaxError("Missing unescaped comma separating interval ends in " + intervalStr);
  }
  try {
    start = getLimitFromString(schemaField, startStr);
  } catch (SyntaxError | SolrException e) {
    throw new SyntaxError(String.format(Locale.ROOT, "Invalid start interval for key '%s': %s", key, e.getMessage()), e);
  }
  StringBuilder endStr = new StringBuilder(lastNdx);
  i = unescape(intervalStr, i, lastNdx, endStr);
  if (i != lastNdx) {
    // a second unescaped comma means there are more than two limits
    throw new SyntaxError("Extra unescaped comma at index " + i + " in interval " + intervalStr);
  }
  try {
    end = getLimitFromString(schemaField, endStr);
  } catch (SyntaxError | SolrException e) {
    throw new SyntaxError(String.format(Locale.ROOT, "Invalid end interval for key '%s': %s", key, e.getMessage()), e);
  }
  // TODO: what about escaping star (*)?
  // TODO: escaping spaces on ends?
  if (schemaField.getType().getNumericType() != null) {
    setNumericLimits(schemaField);
  }
  if (start != null && end != null && start.compareTo(end) > 0) {
    throw new SyntaxError("Start is higher than end in interval for key: " + key);
  }
}
/**
 * Set startLimit and endLimit for numeric values. The limits in this case
 * are going to be the <code>long</code> representation of the original
 * value. <code>startLimit</code> will be incremented by one in case of the
 * interval start being exclusive. <code>endLimit</code> will be decremented by
 * one in case of the interval end being exclusive. A null (unbounded) start
 * or end maps to Long.MIN_VALUE / Long.MAX_VALUE respectively. Floats and
 * doubles use their sortable-long encoding so ordering matches numeric order.
 */
private void setNumericLimits(SchemaField schemaField) {
  if (start == null) {
    startLimit = Long.MIN_VALUE;
  } else {
    switch (schemaField.getType().getNumericType()) {
      case LONG:
        if (schemaField.getType() instanceof TrieDateField) {
          // dates compare by epoch millis
          startLimit = ((Date) schemaField.getType().toObject(schemaField, start)).getTime();
        } else {
          startLimit = (long) schemaField.getType().toObject(schemaField, start);
        }
        break;
      case INT:
        startLimit = ((Integer) schemaField.getType().toObject(schemaField, start)).longValue();
        break;
      case FLOAT:
        startLimit = NumericUtils.floatToSortableInt((float) schemaField.getType().toObject(schemaField, start));
        break;
      case DOUBLE:
        startLimit = NumericUtils.doubleToSortableLong((double) schemaField.getType().toObject(schemaField, start));
        break;
      default:
        throw new AssertionError();
    }
    if (startOpen) {
      // exclusive start: smallest included long is start + 1
      startLimit++;
    }
  }
  if (end == null) {
    endLimit = Long.MAX_VALUE;
  } else {
    switch (schemaField.getType().getNumericType()) {
      case LONG:
        if (schemaField.getType() instanceof TrieDateField) {
          // dates compare by epoch millis
          endLimit = ((Date) schemaField.getType().toObject(schemaField, end)).getTime();
        } else {
          endLimit = (long) schemaField.getType().toObject(schemaField, end);
        }
        break;
      case INT:
        endLimit = ((Integer) schemaField.getType().toObject(schemaField, end)).longValue();
        break;
      case FLOAT:
        endLimit = NumericUtils.floatToSortableInt((float) schemaField.getType().toObject(schemaField, end));
        break;
      case DOUBLE:
        endLimit = NumericUtils.doubleToSortableLong((double) schemaField.getType().toObject(schemaField, end));
        break;
      default:
        throw new AssertionError();
    }
    if (endOpen) {
      // exclusive end: largest included long is end - 1
      endLimit--;
    }
  }
}
/**
 * Converts a parsed limit into the field's internal (indexed) representation.
 * A lone '*' denotes an unbounded limit and is represented as {@code null}.
 * Leading/trailing whitespace around the limit is stripped.
 *
 * @throws SyntaxError if the limit is empty after trimming
 */
private BytesRef getLimitFromString(SchemaField schemaField, StringBuilder builder) throws SyntaxError {
  final String limit = builder.toString().trim();
  if (limit.isEmpty()) {
    throw new SyntaxError("Empty interval limit");
  }
  if ("*".equals(limit)) {
    return null; // unbounded
  }
  return new BytesRef(schemaField.getType().toInternal(limit));
}
/**
 * Update the ordinals based on the current reader. This method
 * (or {@link #updateContext(SortedSetDocValues)} depending on the
 * DocValues type) needs to be called for every reader before
 * {@link #includes(long)} is called on any document of the reader.
 *
 * @param sdv DocValues for the current reader
 */
public void updateContext(SortedDocValues sdv) {
  if (start == null) {
    /*
     * Unset start. All ordinals will be greater than -1.
     */
    startLimit = -1;
  } else {
    startLimit = sdv.lookupTerm(start);
    if (startLimit < 0) {
      /*
       * The term was not found in this segment. We'll use inserting-point as
       * start ordinal (then, to be included in the interval, an ordinal needs to be
       * greater or equal to startLimit)
       */
      startLimit = (startLimit * -1) - 1;
    } else {
      /*
       * The term exists in this segment, If the interval has start open (the limit is
       * excluded), then we move one ordinal higher. Then, to be included in the
       * interval, an ordinal needs to be greater or equal to startLimit
       */
      if (startOpen) {
        startLimit++;
      }
    }
  }
  if (end == null) {
    /*
     * Unset end. All ordinals will be lower than Long.MAX_VALUE.
     */
    endLimit = Long.MAX_VALUE;
  } else {
    endLimit = sdv.lookupTerm(end);
    if (endLimit < 0) {
      /*
       * The term was not found in this segment. We'll use insertion-point -1 as
       * endLimit. To be included in this interval, ordinals must be lower or
       * equal to endLimit
       */
      endLimit = (endLimit * -1) - 2;
    } else {
      if (endOpen) {
        /*
         * The term exists in this segment. If the interval has end open (the
         * limit is excluded), then we move one ordinal lower. Then, to be
         * included in the interval, an ordinal needs to be lower or equal to
         * endLimit
         */
        endLimit--;
      }
    }
  }
}
/**
 * Update the ordinals based on the current reader. This method
 * (or {@link #updateContext(SortedDocValues)} depending on the
 * DocValues type) needs to be called for every reader before
 * {@link #includes(long)} is called on any document of the reader.
 *
 * @param sdv DocValues for the current reader
 */
public void updateContext(SortedSetDocValues sdv) {
  if (start == null) {
    /*
     * Unset start. All ordinals will be greater than -1.
     */
    startLimit = -1;
  } else {
    startLimit = sdv.lookupTerm(start);
    if (startLimit < 0) {
      /*
       * The term was not found in this segment. We'll use inserting-point as
       * start ordinal (then, to be included in the interval, an ordinal needs to be
       * greater or equal to startLimit)
       */
      startLimit = (startLimit * -1) - 1;
    } else {
      /*
       * The term exists in this segment, If the interval has start open (the limit is
       * excluded), then we move one ordinal higher. Then, to be included in the
       * interval, an ordinal needs to be greater or equal to startLimit
       */
      if (startOpen) {
        startLimit++;
      }
    }
  }
  if (end == null) {
    /*
     * Unset end. All ordinals will be lower than Long.MAX_VALUE.
     */
    endLimit = Long.MAX_VALUE;
  } else {
    endLimit = sdv.lookupTerm(end);
    if (endLimit < 0) {
      /*
       * The term was not found in this segment. We'll use insertion-point -1 as
       * endLimit. To be included in this interval, ordinals must be lower or
       * equal to endLimit
       */
      endLimit = (endLimit * -1) - 2;
    } else {
      /*
       * The term exists in this segment. If the interval has end open (the
       * limit is excluded), then we move one ordinal lower. Then, to be
       * included in the interval, an ordinal needs to be lower or equal to
       * endLimit
       */
      if (endOpen) {
        endLimit--;
      }
    }
  }
}
/**
 * Classifies {@code value} relative to this interval. On numeric
 * single-valued fields, {@code value} is the sortable {@code long}
 * representation of the document value and no prior setup is needed; on
 * multi-valued and/or non-numeric fields it is the term ordinal in the
 * current segment, and {@link #updateContext(SortedDocValues)} or
 * {@link #updateContext(SortedSetDocValues)} must have been called for that
 * segment first.
 *
 * @param value the document value (or ordinal) to classify
 * @return {@link IntervalCompareResult#LOWER_THAN_START} if the value is below
 *         {@code startLimit}, {@link IntervalCompareResult#GREATER_THAN_END}
 *         if it is above {@code endLimit}, otherwise
 *         {@link IntervalCompareResult#INCLUDED}
 * @see NumericUtils#floatToSortableInt(float)
 * @see NumericUtils#doubleToSortableLong(double)
 */
public IntervalCompareResult includes(long value) {
  if (value < startLimit) {
    return IntervalCompareResult.LOWER_THAN_START;
  }
  return (value > endLimit)
      ? IntervalCompareResult.GREATER_THAN_END
      : IntervalCompareResult.INCLUDED;
}
/* Copies characters from s[i..n) into sb, resolving backslash escapes, and
   stops after the first unescaped comma. Returns the index just past that
   comma, or n if no unescaped comma exists. Throws if the span ends with a
   dangling backslash. */
private int unescape(String s, int i, int n, StringBuilder sb) throws SyntaxError {
  int pos = i;
  while (pos < n) {
    char c = s.charAt(pos);
    if (c == '\\') {
      // escape: take the next character literally
      pos++;
      if (pos >= n) {
        throw new SyntaxError("Unfinished escape at index " + pos + " in facet interval " + s);
      }
      c = s.charAt(pos);
    } else if (c == ',') {
      // unescaped separator: caller resumes after it
      return pos + 1;
    }
    sb.append(c);
    pos++;
  }
  return n;
}
/** Debug representation showing the key, limits, and open/closed flags. */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder(this.getClass().getSimpleName());
  sb.append(" [key=").append(key)
    .append(", start=").append(start)
    .append(", end=").append(end)
    .append(", startOpen=").append(startOpen)
    .append(", endOpen=").append(endOpen)
    .append("]");
  return sb.toString();
}
/**
 * @return the number of documents counted in this interval so far
 */
public int getCount() {
  return count;
}
/**
 * Adds one to the number of documents that match this interval.
 */
void incCount() {
  count += 1;
}
/**
 * @return the human readable key identifying this interval in responses
 */
public String getKey() {
  return key;
}
}
/**
 * Iterates over every configured interval, in their original order.
 */
@Override
public Iterator<FacetInterval> iterator() {
  // Iterate over a defensive copy so that Iterator.remove() (supported by
  // ArrayList's iterator) can never modify the backing array.
  ArrayList<FacetInterval> snapshot = new ArrayList<FacetInterval>(Arrays.asList(intervals));
  return snapshot.iterator();
}
}

View File

@ -52,6 +52,7 @@ import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.request.IntervalFacets.FacetInterval;
import org.apache.solr.schema.BoolField;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.IndexSchema;
@ -240,6 +241,7 @@ public class SimpleFacets {
* @see #getFacetFieldCounts
* @see #getFacetDateCounts
* @see #getFacetRangeCounts
* @see #getFacetIntervalCounts
* @see FacetParams#FACET
* @return a NamedList of Facet Count info or null
*/
@ -255,6 +257,7 @@ public class SimpleFacets {
facetResponse.add("facet_fields", getFacetFieldCounts());
facetResponse.add("facet_dates", getFacetDateCounts());
facetResponse.add("facet_ranges", getFacetRangeCounts());
facetResponse.add("facet_intervals", getFacetIntervalCounts());
} catch (IOException e) {
throw new SolrException(ErrorCode.SERVER_ERROR, e);
@ -1403,6 +1406,41 @@ public class SimpleFacets {
return dmp.parseMath(gap);
}
}
/**
 * Returns a <code>NamedList</code> with each entry having the "key" of the interval as name and the count of docs
 * in that interval as value. All intervals added in the request are included in the returned
 * <code>NamedList</code> (included those with 0 count), and it's required that the order of the intervals
 * is deterministic and equals in all shards of a distributed request, otherwise the collation of results
 * will fail.
 *
 */
public NamedList<Object> getFacetIntervalCounts() throws IOException, SyntaxError {
  NamedList<Object> response = new SimpleOrderedMap<Object>();
  String[] intervalFields = params.getParams(FacetParams.FACET_INTERVAL);
  if (intervalFields == null || intervalFields.length == 0) {
    return response;
  }
  for (String field : intervalFields) {
    // Resolves per-field params/local params for this facet field.
    parseParams(FacetParams.FACET_INTERVAL, field);
    String[] intervalStrs = required.getFieldParams(field, FacetParams.FACET_INTERVAL_SET);
    SchemaField schemaField = searcher.getCore().getLatestSchema().getField(field);
    // Interval faceting is implemented on top of doc values only.
    if (!schemaField.hasDocValues()) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Interval Faceting only on fields with doc values");
    }
    if (params.getBool(GroupParams.GROUP_FACET, false)) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Interval Faceting can't be used with " + GroupParams.GROUP_FACET);
    }
    SimpleOrderedMap<Integer> fieldCounts = new SimpleOrderedMap<Integer>();
    response.add(field, fieldCounts);
    // IntervalFacets computes the per-interval counts over the doc set and
    // iterates them back in request order.
    IntervalFacets intervalFacets = new IntervalFacets(schemaField, searcher, docs, intervalStrs);
    for (FacetInterval interval : intervalFacets) {
      fieldCounts.add(interval.getKey(), interval.getCount());
    }
  }
  return response;
}
}

View File

@ -0,0 +1,82 @@
<?xml version="1.0" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Minimal schema used by the distributed interval-faceting tests. -->
<schema name="test" version="1.5">
<types>
<!-- Trie-based primitive types (precisionStep="0") plus string/boolean. -->
<fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="date" class="solr.TrieDateField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldtype name="string" class="solr.StrField" sortMissingLast="true"/>
<fieldtype name="boolean" class="solr.BoolField" sortMissingLast="true"/>
</types>
<fields>
<field name="id" type="string" indexed="true" stored="true" docValues="false" multiValued="false" required="true"/>
<field name="id_dv" type="string" indexed="false" stored="false" docValues="true" multiValued="false" required="true"/>
<!-- For each primitive type there are indexed (no docValues) and
     docValues-enabled dynamic-field variants, both single-valued and
     multi-valued ("*s*" suffixes). Interval faceting requires docValues. -->
<dynamicField name="*_i" type="int" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_i_dv" type="int" indexed="false" stored="false" docValues="true"/>
<dynamicField name="*_is" type="int" indexed="true" stored="false" docValues="false" multiValued="true"/>
<dynamicField name="*_is_dv" type="int" indexed="false" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_s" type="string" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_s_dv" type="string" indexed="false" stored="false" docValues="true"/>
<dynamicField name="*_ss" type="string" indexed="true" stored="false" docValues="false" multiValued="true"/>
<dynamicField name="*_ss_dv" type="string" indexed="false" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_f" type="float" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_f_dv" type="float" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_fs_dv" type="float" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_l" type="long" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_l_dv" type="long" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_ls_dv" type="long" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_d" type="double" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_d_dv" type="double" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_ds_dv" type="double" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_dt" type="date" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_dt_dv" type="date" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_dts_dv" type="date" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
<dynamicField name="*_ti1" type="int" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_tl" type="long" indexed="true" stored="true"/>
<dynamicField name="*_tl1" type="long" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_tf" type="float" indexed="true" stored="true"/>
<dynamicField name="*_tf1" type="float" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_td" type="double" indexed="true" stored="true"/>
<dynamicField name="*_td1" type="double" indexed="true" stored="true" multiValued="false"/>
<!-- NOTE(review): "*_tds" is declared multiValued="false" despite the plural
     "s" suffix used elsewhere for multi-valued fields - confirm intentional. -->
<dynamicField name="*_tds" type="double" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_tdt" type="date" indexed="true" stored="true"/>
<dynamicField name="*_tdt1" type="date" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_i1" type="int" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_l1" type="long" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_f1" type="float" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_d1" type="double" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_dt1" type="date" indexed="true" stored="true" multiValued="false"/>
</fields>
<defaultSearchField>id</defaultSearchField>
<uniqueKey>id</uniqueKey>
<!-- Mirror indexed-only fields into their docValues-only counterparts;
     presumably so tests can facet on docValues over the same content that
     queries hit via the indexed variant - confirm against the tests. -->
<copyField source="*_i" dest="*_i_dv" />
<copyField source="*_f" dest="*_f_dv" />
<copyField source="*_is" dest="*_is_dv" />
<copyField source="*_s" dest="*_s_dv" />
<copyField source="*_ss" dest="*_ss_dv" />
<copyField source="id" dest="id_dv" />
</schema>

View File

@ -20,6 +20,9 @@
<types>
<fieldType name="int" class="solr.TrieIntField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="long" class="solr.TrieLongField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldType name="date" class="solr.TrieDateField" precisionStep="0" omitNorms="true" positionIncrementGap="0"/>
<fieldtype name="string" class="solr.StrField" sortMissingLast="true"/>
</types>
@ -35,7 +38,17 @@
<dynamicField name="*_ss" type="string" indexed="true" stored="false" docValues="false" multiValued="true"/>
<dynamicField name="*_ss_dv" type="string" indexed="false" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_f" type="float" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_f_dv" type="float" indexed="false" stored="false" docValues="true"/>
<dynamicField name="*_f_dv" type="float" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_fs_dv" type="float" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_l" type="long" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_l_dv" type="long" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_ls_dv" type="long" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_d" type="double" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_d_dv" type="double" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_ds_dv" type="double" indexed="true" stored="false" docValues="true" multiValued="true"/>
<dynamicField name="*_dt" type="date" indexed="true" stored="false" docValues="false"/>
<dynamicField name="*_dt_dv" type="date" indexed="true" stored="false" docValues="true"/>
<dynamicField name="*_dts_dv" type="date" indexed="true" stored="false" docValues="true" multiValued="true"/>
</fields>
<defaultSearchField>id</defaultSearchField>

View File

@ -0,0 +1,157 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr;

import java.util.Arrays;
import java.util.Comparator;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.junit.BeforeClass;

/**
 * Randomized distributed test for interval faceting: indexes random docs over
 * every supported docValues field type and checks that distributed interval
 * facet responses collate consistently across shards.
 */
@Slow
@LuceneTestCase.SuppressCodecs({"Lucene40", "Lucene41", "Lucene42", "Lucene43"})
public class DistributedIntervalFacetingTest extends
    BaseDistributedSearchTestCase {

  @BeforeClass
  public static void beforeSuperClass() throws Exception {
    // Schema with docValues on every faceted field; interval faceting requires doc values.
    schemaString = "schema-distrib-interval-faceting.xml";
    configString = "solrconfig-basic.xml";
  }

  @Override
  public void doTest() throws Exception {
    del("*:*");
    commit();
    testRandom();
  }

  private void testRandom() throws Exception {
    // All field values will be a number between 0 and cardinality
    int cardinality = 1000000;
    // Fields to use for interval faceting
    String[] fields = new String[]{"test_s_dv", "test_i_dv", "test_l_dv", "test_f_dv", "test_d_dv",
        "test_ss_dv", "test_is_dv", "test_fs_dv", "test_ls_dv", "test_ds_dv"};
    // Draw the loop bound once: atLeast() returns a random value, so calling
    // it in the loop condition would re-roll the bound on every iteration.
    int numDocs = atLeast(500);
    for (int i = 0; i < numDocs; i++) {
      if (random().nextInt(50) == 0) {
        //have some empty docs
        indexr("id", String.valueOf(i));
        continue;
      }
      if (random().nextInt(100) == 0 && i > 0) {
        //delete some docs
        del("id:" + String.valueOf(i - 1));
      }
      // One value for each single-valued field, plus 0..4 extra value sets
      // for the multi-valued fields (10 array slots per set).
      Object[] docFields = new Object[(random().nextInt(5)) * 10 + 12];
      docFields[0] = "id";
      docFields[1] = String.valueOf(i);
      docFields[2] = "test_s_dv";
      docFields[3] = String.valueOf(random().nextInt(cardinality));
      docFields[4] = "test_i_dv";
      docFields[5] = String.valueOf(random().nextInt(cardinality));
      docFields[6] = "test_l_dv";
      docFields[7] = String.valueOf(random().nextInt(cardinality));
      docFields[8] = "test_f_dv";
      docFields[9] = String.valueOf(random().nextFloat() * cardinality);
      docFields[10] = "test_d_dv";
      docFields[11] = String.valueOf(random().nextDouble() * cardinality);
      for (int j = 12; j < docFields.length; ) {
        docFields[j++] = "test_ss_dv";
        docFields[j++] = String.valueOf(random().nextInt(cardinality));
        docFields[j++] = "test_is_dv";
        docFields[j++] = String.valueOf(random().nextInt(cardinality));
        docFields[j++] = "test_ls_dv";
        docFields[j++] = String.valueOf(random().nextInt(cardinality));
        docFields[j++] = "test_fs_dv";
        docFields[j++] = String.valueOf(random().nextFloat() * cardinality);
        docFields[j++] = "test_ds_dv";
        docFields[j++] = String.valueOf(random().nextDouble() * cardinality);
      }
      indexr(docFields);
      if (random().nextInt(50) == 0) {
        commit();
      }
    }
    commit();

    handle.clear();
    handle.put("QTime", SKIPVAL);
    handle.put("timestamp", SKIPVAL);
    handle.put("maxScore", SKIPVAL);

    // Same fix as above: hoist the random query count out of the condition.
    int numQueries = atLeast(100);
    for (int i = 0; i < numQueries; i++) {
      doTestQuery(cardinality, fields);
    }
  }

  /**
   * Executes one query using interval faceting and compares with the same query using
   * facet query with the same range
   */
  private void doTestQuery(int cardinality, String[] fields) throws Exception {
    String[] startOptions = new String[]{"(", "["};
    String[] endOptions = new String[]{")", "]"};
    // the query should match some documents in most cases
    Integer[] qRange = getRandomRange(cardinality, "id");
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("q", "id:[" + qRange[0] + " TO " + qRange[1] + "]");
    params.set("facet", "true");
    params.set("rows", "0");
    String field = fields[random().nextInt(fields.length)]; //choose from any of the fields
    params.set("facet.interval", field);
    // Draw the interval count once; a random call in the loop condition
    // would re-roll the bound on every iteration.
    int numIntervals = 1 + random().nextInt(20);
    for (int i = 0; i < numIntervals; i++) {
      Integer[] interval = getRandomRange(cardinality, field);
      // Open/closed endpoints are chosen from the endpoint values' parity.
      String open = startOptions[interval[0] % 2];
      String close = endOptions[interval[1] % 2];
      params.add("f." + field + ".facet.interval.set", open + interval[0] + "," + interval[1] + close);
    }
    // query() compares the distributed response against a single control shard.
    query(params);
  }

  /**
   * Returns a random range. It's guaranteed that the first
   * number will be lower than the second, and both of them
   * between 0 (inclusive) and <code>max</code> (exclusive).
   * If the fieldName is "test_s_dv" or "test_ss_dv" (the
   * two fields used for Strings), the comparison will be done
   * alphabetically
   */
  private Integer[] getRandomRange(int max, String fieldName) {
    Integer[] values = new Integer[2];
    values[0] = random().nextInt(max);
    values[1] = random().nextInt(max);
    if ("test_s_dv".equals(fieldName) || "test_ss_dv".equals(fieldName)) {
      // String fields sort lexicographically, so order the endpoints that way.
      Arrays.sort(values, new Comparator<Integer>() {
        @Override
        public int compare(Integer o1, Integer o2) {
          return String.valueOf(o1).compareTo(String.valueOf(o2));
        }
      });
    } else {
      Arrays.sort(values);
    }
    return values;
  }
}

View File

@ -317,7 +317,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}",
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
// Facet counts based on groups
@ -326,7 +326,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'value1_s1':{'matches':5,'groups':[{'groupValue':'1','doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}",
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
// Facet counts based on groups and with group.func. This should trigger FunctionAllGroupHeadsCollector
@ -335,7 +335,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'strdist(1,value1_s1,edit)':{'matches':5,'groups':[{'groupValue':1.0,'doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}",
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
// Facet counts based on groups without sort on an int field.
@ -344,7 +344,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'value4_i':{'matches':5,'groups':[{'groupValue':1,'doclist':{'numFound':3,'start':0,'docs':[{'id':'1'}]}}]}}",
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
// Multi select facets AND group.truncate=true
@ -353,7 +353,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}]}}",
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
// Multi select facets AND group.truncate=false
@ -362,7 +362,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'value4_i':{'matches':2,'groups':[{'groupValue':2,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}]}}",
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',3,'b',2]},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
// Multi select facets AND group.truncate=true
@ -371,7 +371,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'sub(value4_i,1)':{'matches':2,'groups':[{'groupValue':1.0,'doclist':{'numFound':2,'start':0,'docs':[{'id':'3'}]}}]}}",
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{},'facet_fields':{'value3_s1':['a',1,'b',1]},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
}
@ -394,7 +394,7 @@ public class TestGroupingSearch extends SolrTestCaseJ4 {
assertJQ(
req,
"/grouped=={'cat_sI':{'matches':2,'groups':[{'groupValue':'a','doclist':{'numFound':1,'start':0,'docs':[{'id':'5'}]}}]}}",
"/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{},'facet_dates':{},'facet_ranges':{}}"
"/facet_counts=={'facet_queries':{'LW1':2,'LM1':2,'LM3':2},'facet_fields':{},'facet_dates':{},'facet_ranges':{},'facet_intervals':{}}"
);
}

View File

@ -0,0 +1,815 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.request;
import java.util.Arrays;
import java.util.Comparator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.IntervalFacets.FacetInterval;
import org.apache.solr.request.IntervalFacets.IntervalCompareResult;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.util.RefCounted;
import org.junit.BeforeClass;
import org.junit.Test;
@LuceneTestCase.SuppressCodecs({"Lucene40", "Lucene41", "Lucene42", "Lucene43"})
public class TestIntervalFaceting extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeTests() throws Exception {
  // Uses the docValues-enabled schema; interval faceting requires doc values.
  initCore("solrconfig-basic.xml", "schema-docValuesFaceting.xml");
}
@Override
public void tearDown() throws Exception {
  // Each test indexes its own documents, so wipe and compact the index and
  // verify it is empty before handing control back to the superclass.
  assertU(delQ("*:*"));
  assertU(commit());
  assertU(optimize());
  assertQ(req("*:*"), "//*[@numFound='0']");
  super.tearDown();
}
@Test
public void testMultiValueFields() {
  // First pass: one value per doc in a multiValued field. The interleaved
  // commits spread the docs over several segments (asserted below).
  assertU(adoc("id", "1", "test_ss_dv", "dog"));
  assertU(adoc("id", "2", "test_ss_dv", "cat"));
  assertU(adoc("id", "3", "test_ss_dv", "bird"));
  assertU(adoc("id", "4", "test_ss_dv", "turtle"));
  assertU(commit());
  // Values with backslashes/commas exercise interval-string escaping.
  assertU(adoc("id", "5", "test_ss_dv", "\\goodbye,"));
  assertU(adoc("id", "6", "test_ss_dv", ",hello\\"));
  assertU(adoc("id", "7", "test_ss_dv", "dog"));
  assertU(commit());
  assertU(adoc("id", "8", "test_ss_dv", "dog"));
  assertU(adoc("id", "9", "test_ss_dv", "cat"));
  assertU(adoc("id", "10"));
  assertU(commit());
  assertMultipleReaders();
  assertIntervalQueriesString("test_ss_dv");
  // Second pass: docs with two values each. A doc whose multiple values fall
  // in the same interval must be counted once ([*,*] is 2, not 4).
  assertU(delQ("*:*"));
  assertU(commit());
  assertU(optimize());
  assertU(adoc("id", "1", "test_ss_dv", "dog", "test_ss_dv", "cat"));
  assertU(adoc("id", "2", "test_ss_dv", "cat", "test_ss_dv", "bird"));
  assertU(commit());
  assertIntervalQuery("test_ss_dv", "[hello,hello]", "0");
  assertIntervalQuery("test_ss_dv", "[dog,dog]", "1");
  assertIntervalQuery("test_ss_dv", "[cat,cat]", "2");
  assertIntervalQuery("test_ss_dv", "[*,*]", "2", "[*,cat)", "1", "[cat,dog)", "2", "[dog,*)", "1");
}
@Test
public void testMultipleSegments() {
  // Commits between batches force multiple index segments; the test asserts
  // the searcher actually sees several leaves before running the facets.
  assertU(adoc("id", "1", "test_s_dv", "dog"));
  assertU(adoc("id", "2", "test_s_dv", "cat"));
  assertU(adoc("id", "3", "test_s_dv", "bird"));
  assertU(adoc("id", "4", "test_s_dv", "turtle"));
  assertU(commit());
  // Values with backslashes/commas exercise interval-string escaping.
  assertU(adoc("id", "5", "test_s_dv", "\\goodbye,"));
  assertU(adoc("id", "6", "test_s_dv", ",hello\\"));
  assertU(adoc("id", "7", "test_s_dv", "dog"));
  assertU(commit());
  assertU(adoc("id", "8", "test_s_dv", "dog"));
  assertU(adoc("id", "9", "test_s_dv", "cat"));
  assertU(adoc("id", "10"));
  assertU(commit());
  assertMultipleReaders();
  assertIntervalQueriesString("test_s_dv");
}
@Test
public void testMultipleTerms() {
  // Multi-word string values: interval endpoints compare against the whole
  // term, and a comma inside an endpoint must be escaped ("Las\,").
  String[] cities = {"Buenos Aires", "New York", "Los Angeles",
      "San Francisco", "Las Vegas", "São Paulo"};
  for (int id = 1; id <= cities.length; id++) {
    assertU(adoc("id", String.valueOf(id), "test_s_dv", cities[id - 1]));
  }
  assertU(adoc("id", "10"));
  assertU(commit());
  assertIntervalQuery("test_s_dv", "[*,*]", "6");
  assertIntervalQuery("test_s_dv", "[A,C]", "1");
  assertIntervalQuery("test_s_dv", "[Buenos Aires,Buenos Aires]", "1");
  assertIntervalQuery("test_s_dv", "[Las,Los]", "1");
  assertIntervalQuery("test_s_dv", "[Las,Los Angeles]", "2");
  assertIntervalQuery("test_s_dv", "[Las,Los Angeles)", "1");
  assertIntervalQuery("test_s_dv", "[Las\\,,Los Angeles]", "1");
}
/** Fails unless the current searcher's index is split across at least two leaf readers. */
private void assertMultipleReaders() {
  RefCounted<SolrIndexSearcher> searcherRef = h.getCore().getSearcher();
  try {
    int leafCount = searcherRef.get().getTopReaderContext().leaves().size();
    assertTrue("Expected multiple reader leaves. Found " + leafCount, leafCount >= 2);
  } finally {
    // Always release the ref-counted searcher.
    searcherRef.decref();
  }
}
@Test
public void testBasic() {
  // Single commit: the common single-segment, single-valued case, including
  // values with backslashes/commas to exercise interval-string escaping.
  assertU(adoc("id", "1", "test_s_dv", "dog"));
  assertU(adoc("id", "2", "test_s_dv", "cat"));
  assertU(adoc("id", "3", "test_s_dv", "bird"));
  assertU(adoc("id", "4", "test_s_dv", "turtle"));
  assertU(adoc("id", "5", "test_s_dv", "\\goodbye,"));
  assertU(adoc("id", "6", "test_s_dv", ",hello\\"));
  assertU(adoc("id", "7", "test_s_dv", "dog"));
  assertU(adoc("id", "8", "test_s_dv", "dog"));
  assertU(adoc("id", "9", "test_s_dv", "cat"));
  assertU(adoc("id", "10"));
  assertU(commit());
  assertIntervalQueriesString("test_s_dv");
  // error cases
  // Malformed interval strings must be rejected with 400 Bad Request.
  assertQEx("missing beginning of range",
      req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
          "f.test_s_dv.facet.interval.set", "bird,bird]"),
      SolrException.ErrorCode.BAD_REQUEST
  );
  assertQEx("only separator is escaped",
      req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
          "f.test_s_dv.facet.interval.set", "(bird\\,turtle]"),
      SolrException.ErrorCode.BAD_REQUEST
  );
  assertQEx("missing separator",
      req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
          "f.test_s_dv.facet.interval.set", "(bird]"),
      SolrException.ErrorCode.BAD_REQUEST
  );
  assertQEx("missing end of range",
      req("fl", "test_s_dv", "q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
          "f.test_s_dv.facet.interval.set", "(bird,turtle"),
      SolrException.ErrorCode.BAD_REQUEST
  );
}
@Test
public void testMultipleFields() {
  // Interval faceting on two fields in one request; the long field also
  // gets two overlapping interval sets.
  assertU(adoc("id", "1", "test_s_dv", "dog", "test_l_dv", "1"));
  assertU(adoc("id", "2", "test_s_dv", "cat", "test_l_dv", "2"));
  assertU(adoc("id", "3", "test_s_dv", "bird", "test_l_dv", "3"));
  assertU(adoc("id", "4", "test_s_dv", "turtle", "test_l_dv", "4"));
  assertU(adoc("id", "5", "test_s_dv", "\\goodbye,", "test_l_dv", "5"));
  assertU(adoc("id", "6", "test_s_dv", ",hello\\", "test_l_dv", "6"));
  assertU(adoc("id", "7", "test_s_dv", "dog", "test_l_dv", "7"));
  assertU(adoc("id", "8", "test_s_dv", "dog", "test_l_dv", "8"));
  assertU(adoc("id", "9", "test_s_dv", "cat", "test_l_dv", "9"));
  assertU(adoc("id", "10"));
  assertU(commit());
  // Expected counts: [cat,dog] matches cat/dog docs (ids 1,2,7,8,9) = 5;
  // [3,6] matches ids 3..6 = 4; [5,9] matches ids 5..9 = 5.
  assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
          "facet.interval", "test_l_dv", "f.test_s_dv.facet.interval.set", "[cat,dog]",
          "f.test_l_dv.facet.interval.set", "[3,6]",
          "f.test_l_dv.facet.interval.set", "[5,9]"),
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,6]'][.=4]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[5,9]'][.=5]");
}
@Test
@Slow
public void testRandom() throws Exception {
  // All field values will be a number between 0 and cardinality
  int cardinality = 1000000;
  // Fields to use for interval faceting
  String[] fields = new String[]{"test_s_dv", "test_i_dv", "test_l_dv", "test_f_dv", "test_d_dv",
      "test_ss_dv", "test_is_dv", "test_fs_dv", "test_ls_dv", "test_ds_dv"};
  // Draw the loop bound once: atLeast() returns a random value, so calling
  // it in the loop condition would re-roll the bound on every iteration.
  int numDocs = atLeast(5000);
  for (int i = 0; i < numDocs; i++) {
    if (random().nextInt(50) == 0) {
      //have some empty docs
      assertU(adoc("id", String.valueOf(i)));
      continue;
    }
    if (random().nextInt(100) == 0 && i > 0) {
      //delete some docs
      assertU(delI(String.valueOf(i - 1)));
    }
    // One value for each single-valued field, plus 0..4 extra value sets
    // for the multi-valued fields (10 array slots per set).
    String[] docFields = new String[(random().nextInt(5)) * 10 + 12];
    docFields[0] = "id";
    docFields[1] = String.valueOf(i);
    docFields[2] = "test_s_dv";
    docFields[3] = String.valueOf(random().nextInt(cardinality));
    docFields[4] = "test_i_dv";
    docFields[5] = String.valueOf(random().nextInt(cardinality));
    docFields[6] = "test_l_dv";
    docFields[7] = String.valueOf(random().nextInt(cardinality));
    docFields[8] = "test_f_dv";
    docFields[9] = String.valueOf(random().nextFloat() * cardinality);
    docFields[10] = "test_d_dv";
    docFields[11] = String.valueOf(random().nextDouble() * cardinality);
    for (int j = 12; j < docFields.length; ) {
      docFields[j++] = "test_ss_dv";
      docFields[j++] = String.valueOf(random().nextInt(cardinality));
      docFields[j++] = "test_is_dv";
      docFields[j++] = String.valueOf(random().nextInt(cardinality));
      docFields[j++] = "test_ls_dv";
      docFields[j++] = String.valueOf(random().nextInt(cardinality));
      docFields[j++] = "test_fs_dv";
      docFields[j++] = String.valueOf(random().nextFloat() * cardinality);
      docFields[j++] = "test_ds_dv";
      docFields[j++] = String.valueOf(random().nextDouble() * cardinality);
    }
    assertU(adoc(docFields));
    if (random().nextInt(50) == 0) {
      assertU(commit());
    }
  }
  assertU(commit());

  // Same fix as above: hoist the random query count out of the condition.
  int numQueries = atLeast(1000);
  for (int i = 0; i < numQueries; i++) {
    doTestQuery(cardinality, fields);
  }
}
/**
 * Executes one query using interval faceting and compares with the same query using
 * facet query with the same range
 */
@SuppressWarnings("unchecked")
private void doTestQuery(int cardinality, String[] fields) throws Exception {
  String[] startOptions = new String[]{"(", "["};
  String[] endOptions = new String[]{")", "]"};
  // the query should match some documents in most cases
  Integer[] qRange = getRandomRange(cardinality, "id");
  ModifiableSolrParams params = new ModifiableSolrParams();
  params.set("q", "id:[" + qRange[0] + " TO " + qRange[1] + "]");
  params.set("facet", "true");
  String field = fields[random().nextInt(fields.length)]; //choose from any of the fields
  params.set("facet.interval", field);
  // Draw the interval count once: a random call in the loop condition would
  // re-roll the bound on every iteration.
  int numIntervals = 1 + random().nextInt(20);
  for (int i = 0; i < numIntervals; i++) {
    Integer[] interval = getRandomRange(cardinality, field);
    // Open/closed endpoints are chosen from the endpoint values' parity; the
    // equivalent facet.query uses {}/[] range syntax for open/closed ends.
    String open = startOptions[interval[0] % 2];
    String close = endOptions[interval[1] % 2];
    params.add("f." + field + ".facet.interval.set", open + interval[0] + "," + interval[1] + close);
    params.add("facet.query", field + ":" + open.replace('(', '{') + interval[0] + " TO " + interval[1] + close.replace(')', '}'));
  }
  SolrQueryRequest req = req(params);
  try {
    SolrQueryResponse rsp = h.queryAndResponse("standard", req);
    NamedList<Object> facetCounts = (NamedList<Object>) rsp.getValues().get("facet_counts");
    NamedList<Object> facetQueries = (NamedList<Object>) facetCounts.get("facet_queries");
    // (was a redundant double cast here)
    NamedList<Object> facetIntervals = (NamedList<Object>) ((NamedList<Object>) facetCounts.get("facet_intervals")).get(field);
    assertEquals("Responses don't have the same number of facets: \n" + facetQueries + "\n" + facetIntervals,
        facetQueries.size(), facetIntervals.size());
    // Every interval count must equal its equivalent facet.query count.
    for (int i = 0; i < facetIntervals.size(); i++) {
      assertEquals("Interval did not match: " + facetIntervals.getName(i), facetIntervals.getVal(i).toString(),
          facetQueries.get(field + ":" + facetIntervals.getName(i).replace(",", " TO ").replace('(', '{').replace(')', '}')).toString());
    }
  } finally {
    req.close();
  }
}
/**
 * Returns a random range. It's guaranteed that the first
 * number will be lower than the second, and both of them
 * between 0 (inclusive) and <code>max</code> (exclusive).
 * If the fieldName is "test_s_dv" or "test_ss_dv" (the
 * two fields used for Strings), the comparison will be done
 * alphabetically
 */
private Integer[] getRandomRange(int max, String fieldName) {
  Integer first = random().nextInt(max);
  Integer second = random().nextInt(max);
  // String fields sort lexicographically, so order the endpoints that way.
  boolean lexicographic = "test_s_dv".equals(fieldName) || "test_ss_dv".equals(fieldName);
  boolean outOfOrder = lexicographic
      ? String.valueOf(first).compareTo(String.valueOf(second)) > 0
      : first.compareTo(second) > 0;
  if (outOfOrder) {
    return new Integer[]{second, first};
  }
  return new Integer[]{first, second};
}
/**
 * Exercises interval-string parsing: open/closed/unbounded limits on a long
 * field, escaping rules on a string field, rejection of malformed interval
 * strings, and resolution of the facet.interval.set parameter (global value,
 * per-field override, and error cases).
 */
@Test
public void testParse() throws SyntaxError {
  // long field: every combination of open "(" / closed "[" and bounded /
  // unbounded "*" limits, probing values inside, below and above the interval
  assertInterval("test_l_dv", "(0,2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "(0,2]", new long[]{1, 2}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "[0,2]", new long[]{0, 1, 2}, new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "[0,2)", new long[]{0, 1}, new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "(0,*)", new long[]{1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{-1, 0, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{});
  assertInterval("test_l_dv", "(*,2)", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1}, new long[]{}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "(*,*)", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{}, new long[]{});
  assertInterval("test_l_dv", "[0,*]", new long[]{0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{});
  assertInterval("test_l_dv", "[*,2]", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2}, new long[]{}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "[*,*]", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 0, 1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{}, new long[]{});
  // degenerate (empty) intervals are syntactically valid but match nothing
  assertInterval("test_l_dv", "(2,2)", new long[]{}, new long[]{2, 1, 0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "(0,0)", new long[]{}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  // explicit Long.MIN_VALUE/MAX_VALUE limits behave like any other bound
  assertInterval("test_l_dv", "(0," + Long.MAX_VALUE + "]", new long[]{1, 2, 3, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{});
  assertInterval("test_l_dv", "(0," + Long.MAX_VALUE + ")", new long[]{1, 2, 3, Integer.MAX_VALUE}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{Long.MAX_VALUE});
  assertInterval("test_l_dv", "(" + Long.MIN_VALUE + ",0)", new long[]{-1, Integer.MIN_VALUE}, new long[]{Long.MIN_VALUE}, new long[]{1, 2, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "[" + Long.MIN_VALUE + ",0)", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{}, new long[]{1, 2, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "[" + Long.MIN_VALUE + "," + Long.MAX_VALUE + "]", new long[]{-1, Integer.MIN_VALUE, Long.MIN_VALUE, 1, 2, Integer.MAX_VALUE, Long.MAX_VALUE}, new long[]{}, new long[]{});
  assertInterval("test_l_dv", "(" + Long.MIN_VALUE + "," + Long.MAX_VALUE + ")", new long[]{-1, Integer.MIN_VALUE, 1, 2, Integer.MAX_VALUE}, new long[]{Long.MIN_VALUE}, new long[]{Long.MAX_VALUE});
  // whitespace around limits and delimiters is ignored for numeric fields
  assertInterval("test_l_dv", "( 0,2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "( 0,2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "(0, 2)", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "( 0 , 2 )", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", " ( 0 , 2 ) ", new long[]{1}, new long[]{0, -1, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  assertInterval("test_l_dv", "[-1,1]", new long[]{-1, 0, 1}, new long[]{-2, Integer.MIN_VALUE, Long.MIN_VALUE}, new long[]{2, 3, Integer.MAX_VALUE, Long.MAX_VALUE});
  // string field: escaping of special characters in the limits
  assertStringInterval("test_s_dv", "[A,B]", "A", "B");
  assertStringInterval("test_s_dv", "[A,b]", "A", "b");
  assertStringInterval("test_s_dv", "[A\\,,B]", "A,", "B");
  assertStringInterval("test_s_dv", "[A\\),B]", "A)", "B");
  assertStringInterval("test_s_dv", "['A',B]", "'A'", "B");
  assertStringInterval("test_s_dv", "[\"A\",B]", "\"A\"", "B");
  assertStringInterval("test_s_dv", "[A B C,B]", "A B C", "B");
  assertStringInterval("test_s_dv", "[ A B C ,B]", "A B C", "B");
  // These two are currently not possible
  // assertStringInterval("test_s_dv", "[\\ A B C ,B]", " A B C", "B");
  // assertStringInterval("test_s_dv", "[\\*,B]", "*", "B");
  //invalid intervals
  assertBadInterval("test_l_dv", "0,2)", "Invalid start character");
  assertBadInterval("test_l_dv", "{0,2)", "Invalid start character");
  assertBadInterval("test_l_dv", "(0,2", "Invalid end character");
  assertBadInterval("test_l_dv", "(0,2}", "Invalid end character");
  assertBadInterval("test_l_dv", "(0, )", "Empty interval limit");
  assertBadInterval("test_l_dv", "(0)", "Missing unescaped comma separating interval");
  assertBadInterval("test_l_dv", "(,0)", "Empty interval limit");
  assertBadInterval("test_l_dv", "(0 2)", "Missing unescaped comma separating interval");
  assertBadInterval("test_l_dv", "(0 TO 2)", "Missing unescaped comma separating interval");
  assertBadInterval("test_l_dv", "(0 \\, 2)", "Missing unescaped comma separating interval");
  assertBadInterval("test_l_dv", "(A, 2)", "Invalid start interval for key");
  assertBadInterval("test_l_dv", "(2, A)", "Invalid end interval for key");
  assertBadInterval("test_l_dv", "(0,)", "Empty interval limit");
  assertBadInterval("test_l_dv", "(0,-1)", "Start is higher than end in interval for key");
  assertBadInterval("test_s_dv", "A,B)", "Invalid start character");
  assertBadInterval("test_s_dv", "(B,A)", "Start is higher than end in interval for key");
  assertBadInterval("test_s_dv", "(a,B)", "Start is higher than end in interval for key");
  // index a small data set for the parameter-resolution assertions below
  assertU(adoc("id", "1", "test_s_dv", "dog", "test_l_dv", "1"));
  assertU(adoc("id", "2", "test_s_dv", "cat", "test_l_dv", "2"));
  assertU(adoc("id", "3", "test_s_dv", "bird", "test_l_dv", "3"));
  assertU(adoc("id", "4", "test_s_dv", "turtle", "test_l_dv", "4"));
  assertU(adoc("id", "5", "test_s_dv", "\\goodbye,", "test_l_dv", "5"));
  assertU(adoc("id", "6", "test_s_dv", ",hello\\", "test_l_dv", "6"));
  assertU(adoc("id", "7", "test_s_dv", "dog", "test_l_dv", "7"));
  assertU(adoc("id", "8", "test_s_dv", "dog", "test_l_dv", "8"));
  assertU(adoc("id", "9", "test_s_dv", "cat", "test_l_dv", "9"));
  assertU(adoc("id", "10"));
  assertU(commit());
  // facet.interval not set
  assertQ(req("q", "*:*", "facet", "true",
      "f.test_s_dv.facet.interval.set", "[cat,dog]",
      "f.test_l_dv.facet.interval.set", "[3,6]",
      "f.test_l_dv.facet.interval.set", "[5,9]"),
      "count(//lst[@name='facet_intervals']/lst)=0");
  // facet.interval only on one of the fields
  assertQ(req("q", "*:*", "facet", "true",
      "facet.interval", "test_s_dv",
      "f.test_s_dv.facet.interval.set", "[cat,dog]",
      "f.test_l_dv.facet.interval.set", "[3,6]",
      "f.test_l_dv.facet.interval.set", "[5,9]"),
      "count(//lst[@name='facet_intervals']/lst)=1",
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]");
  // existing fields in facet.interval with no intervals defined
  assertQEx("Unexpected exception",
      "Missing required parameter: f.test_l_dv.facet.interval.set (or default: facet.interval.set)",
      req("q", "*:*", "facet", "true",
          "facet.interval", "test_s_dv",
          "facet.interval", "test_l_dv",
          "f.test_s_dv.facet.interval.set", "[cat,dog]"),
      SolrException.ErrorCode.BAD_REQUEST);
  // use of facet.interval.set
  assertQ(req("q", "*:*", "facet", "true",
      "facet.interval", "test_s_dv",
      "facet.interval", "test_l_dv",
      "facet.interval.set", "[1,2]"),
      "count(//lst[@name='facet_intervals']/lst)=2",
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]"
  );
  // multiple facet.interval.set
  assertQ(req("q", "*:*", "facet", "true",
      "facet.interval", "test_s_dv",
      "facet.interval", "test_l_dv",
      "facet.interval.set", "[1,2]",
      "facet.interval.set", "[2,3]",
      "facet.interval.set", "[3,4]"),
      "count(//lst[@name='facet_intervals']/lst)=2",
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]",
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[2,3]'][.=0]",
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[3,4]'][.=0]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[2,3]'][.=2]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]"
  );
  // use of facet.interval.set and override
  assertQ(req("q", "*:*", "facet", "true",
      "facet.interval", "test_s_dv",
      "facet.interval", "test_l_dv",
      "facet.interval.set", "[1,2]",
      "f.test_l_dv.facet.interval.set", "[3,4]",
      "f.test_l_dv.facet.interval.set", "[4,5]"),
      "count(//lst[@name='facet_intervals']/lst)=2",
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[1,2]'][.=0]",
      "count(//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int)=2", // interval [1,2] not present
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[4,5]'][.=2]"
  );
  assertQ(req("q", "*:*", "facet", "true",
      "facet.interval", "test_s_dv",
      "facet.interval", "test_l_dv",
      "facet.interval.set", "[1,2]",
      "facet.interval.set", "[2,3]",
      "facet.interval.set", "[3,4]",
      "f.test_s_dv.facet.interval.set", "[cat,dog]"),
      "count(//lst[@name='facet_intervals']/lst)=2",
      "count(//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int)=1", // only [cat,dog] in test_s_dv
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=5]",
      "count(//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int)=3",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[1,2]'][.=2]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[2,3]'][.=2]",
      "//lst[@name='facet_intervals']/lst[@name='test_l_dv']/int[@name='[3,4]'][.=2]"
  );
  // use of facet.interval.set with wrong field type
  assertQEx("Unexpected Exception",
      "Invalid start interval",
      req("q", "*:*", "facet", "true",
          "facet.interval", "test_l_dv",
          "f.test_l_dv.facet.interval.set", "[cat,dog]"),
      SolrException.ErrorCode.BAD_REQUEST);
}
/**
 * Parses <code>intervalStr</code> into a {@link FacetInterval} on the given
 * string field and asserts that the parsed start/end limits equal the
 * internal (BytesRef) form of the expected limits.
 * <p>
 * Note: the expected value is passed first to {@code assertEquals}, per the
 * JUnit (message, expected, actual) contract — the original had them swapped,
 * which produced misleading failure messages.
 */
private void assertStringInterval(String fieldName, String intervalStr,
                                  String expectedStart, String expectedEnd) throws SyntaxError {
  SchemaField f = h.getCore().getLatestSchema().getField(fieldName);
  FacetInterval interval = new FacetInterval(f, intervalStr);
  assertEquals("Expected start " + expectedStart + " but found " + f.getType().toObject(f, interval.start),
      new BytesRef(f.getType().toInternal(expectedStart)), interval.start);
  assertEquals("Expected end " + expectedEnd + " but found " + f.getType().toObject(f, interval.end),
      new BytesRef(f.getType().toInternal(expectedEnd)), interval.end);
}
/**
 * Asserts that constructing a {@link FacetInterval} from
 * <code>intervalStr</code> on the given field fails with a
 * {@link SyntaxError} whose message contains <code>errorMsg</code>.
 */
private void assertBadInterval(String fieldName, String intervalStr, String errorMsg) {
  SchemaField schemaField = h.getCore().getLatestSchema().getField(fieldName);
  try {
    new FacetInterval(schemaField, intervalStr);
    fail("Expecting SyntaxError for interval String: " + intervalStr);
  } catch (SyntaxError expected) {
    String message = expected.getMessage();
    assertTrue("Unexpected error message for interval String: " + intervalStr + ": " +
        message, message.contains(errorMsg));
  }
}
/**
 * Parses <code>intervalStr</code> into a {@link FacetInterval} on the given
 * field and verifies {@code includes(long)} classifies each probe value as
 * expected.
 *
 * @param included        values expected to fall inside the interval
 * @param lowerThanStart  values expected to sort below the interval start
 * @param greaterThanEnd  values expected to sort above the interval end
 */
private void assertInterval(String fieldName, String intervalStr, long[] included, long[] lowerThanStart, long[] greaterThanEnd) throws SyntaxError {
  SchemaField f = h.getCore().getLatestSchema().getField(fieldName);
  FacetInterval interval = new FacetInterval(f, intervalStr);
  for (long l : included) {
    assertEquals("Value " + l + " should be INCLUDED for interval " + interval,
        IntervalCompareResult.INCLUDED, interval.includes(l));
  }
  for (long l : lowerThanStart) {
    assertEquals("Value " + l + " should be LOWER_THAN_START for interval " + interval,
        IntervalCompareResult.LOWER_THAN_START, interval.includes(l));
  }
  for (long l : greaterThanEnd) {
    assertEquals("Value " + l + " should be GREATER_THAN_END for interval " + interval,
        IntervalCompareResult.GREATER_THAN_END, interval.includes(l));
  }
}
/**
 * Interval faceting on a single-valued long docValues field, including
 * values beyond the int range.
 */
@Test
public void testLongFields() {
  // docs 1..9 hold the values 0..8; doc 10 has no value; doc 11 holds 10
  for (int i = 1; i <= 9; i++) {
    assertU(adoc("id", String.valueOf(i), "test_l_dv", String.valueOf(i - 1)));
  }
  assertU(adoc("id", "10"));
  assertU(adoc("id", "11", "test_l_dv", "10"));
  assertU(commit());
  assertIntervalQueriesNumeric("test_l_dv");
  // add values near Long.MAX_VALUE to exercise intervals beyond int range
  assertU(adoc("id", "12", "test_l_dv", String.valueOf(Long.MAX_VALUE - 3)));
  assertU(adoc("id", "13", "test_l_dv", String.valueOf(Long.MAX_VALUE - 2)));
  assertU(adoc("id", "14", "test_l_dv", String.valueOf(Long.MAX_VALUE - 1)));
  assertU(commit());
  assertIntervalQuery("test_l_dv", "[0," + Integer.MAX_VALUE + "]", "10");
  assertIntervalQuery("test_l_dv", "[" + Integer.MAX_VALUE + "," + Long.MAX_VALUE + "]", "3");
  assertIntervalQuery("test_l_dv", "[" + Integer.MAX_VALUE + ",*]", "3");
}
/** Runs the shared float/double interval-faceting scenario on the float DV field. */
@Test
public void testFloatFields() {
  doTestFloat("test_f_dv");
}
/**
 * Shared scenario for the float and double docValues fields: indexes whole
 * and fractional values and checks interval facet counts over them.
 */
private void doTestFloat(String field) {
  // docs 1..9 hold the values 0..8; doc 10 has no value; doc 11 holds 10
  for (int i = 1; i <= 9; i++) {
    assertU(adoc("id", String.valueOf(i), field, String.valueOf(i - 1)));
  }
  assertU(adoc("id", "10"));
  assertU(adoc("id", "11", field, "10"));
  assertU(commit());
  assertIntervalQueriesNumeric(field);
  // add fractional values so interval limits fall between indexed values
  assertU(adoc("id", "12", field, "1.3"));
  assertU(adoc("id", "13", field, "4.5"));
  assertU(adoc("id", "14", field, "6.7"));
  assertU(adoc("id", "15", field, "123.45"));
  assertU(commit());
  assertIntervalQuery(field, "[0," + Integer.MAX_VALUE + "]", "14");
  assertIntervalQuery(field, "[0,1]", "2");
  assertIntervalQuery(field, "[0,2]", "4");
  assertIntervalQuery(field, "(1,2)", "1");
  assertIntervalQuery(field, "(1,1)", "0");
  assertIntervalQuery(field, "(4,7)", "4");
  assertIntervalQuery(field, "(123,*)", "1");
}
/** Runs the shared float/double interval-faceting scenario on the double DV field. */
@Test
public void testDoubleFields() {
  doTestFloat("test_d_dv");
}
/** Interval faceting on a single-valued int docValues field. */
@Test
public void testIntFields() {
  // docs 1..9 hold the values 0..8; doc 10 has no value; doc 11 holds 10
  for (int i = 1; i <= 9; i++) {
    assertU(adoc("id", String.valueOf(i), "test_i_dv", String.valueOf(i - 1)));
  }
  assertU(adoc("id", "10"));
  assertU(adoc("id", "11", "test_i_dv", "10"));
  assertU(commit());
  assertIntervalQueriesNumeric("test_i_dv");
}
/**
 * Same data set as {@link #testIntFields()} but committed in three batches so
 * the counts are aggregated across multiple index segments.
 */
@Test
public void testIntFieldsMultipleSegments() {
  // first segment: docs 1..3 with values 0..2
  for (int i = 1; i <= 3; i++) {
    assertU(adoc("id", String.valueOf(i), "test_i_dv", String.valueOf(i - 1)));
  }
  assertU(commit());
  // second segment: docs 4..7 with values 3..6
  for (int i = 4; i <= 7; i++) {
    assertU(adoc("id", String.valueOf(i), "test_i_dv", String.valueOf(i - 1)));
  }
  assertU(commit());
  // third segment: docs 8..9 with values 7..8, doc 10 with no value, doc 11 with 10
  assertU(adoc("id", "8", "test_i_dv", "7"));
  assertU(adoc("id", "9", "test_i_dv", "8"));
  assertU(adoc("id", "10"));
  assertU(adoc("id", "11", "test_i_dv", "10"));
  assertU(commit());
  assertMultipleReaders();
  assertIntervalQueriesNumeric("test_i_dv");
}
/**
 * Interval faceting on a multi-valued int docValues field: first with one
 * value per document, then after reindexing docs 1..4 with four values each.
 */
@Test
public void testIntMultivaluedFields() {
  // docs 1..9 hold the single values 0..8; doc 10 has no value; doc 11 holds 10
  for (int i = 1; i <= 9; i++) {
    assertU(adoc("id", String.valueOf(i), "test_is_dv", String.valueOf(i - 1)));
  }
  assertU(adoc("id", "10"));
  assertU(adoc("id", "11", "test_is_dv", "10"));
  assertU(commit());
  assertIntervalQueriesNumeric("test_is_dv");
  // reindex docs 1..4 so doc i holds the four values (i-1)..(i+2)
  for (int i = 1; i <= 4; i++) {
    assertU(adoc("id", String.valueOf(i),
        "test_is_dv", String.valueOf(i - 1),
        "test_is_dv", String.valueOf(i),
        "test_is_dv", String.valueOf(i + 1),
        "test_is_dv", String.valueOf(i + 2)));
  }
  assertU(commit());
  // a doc counts once per interval even when several of its values match
  assertIntervalQuery("test_is_dv", "[1,3]", "4");
  assertIntervalQuery("test_is_dv", "[3,3]", "4");
  assertIntervalQuery("test_is_dv", "[5,9]", "6");
  assertIntervalQuery("test_is_dv", "(5,9)", "4");
  assertIntervalQuery("test_is_dv", "[*,*]", "10");
}
/**
 * Interval faceting on a date docValues field, including millisecond
 * boundaries, open "*" limits and date-math ("NOW", "NOW/DAY-1DAY") limits.
 */
@Test
public void testDateFields() {
  assertU(adoc("id", "1", "test_dt_dv", "2013-01-01T00:00:00Z"));
  assertU(adoc("id", "2", "test_dt_dv", "2013-01-02T00:00:00Z"));
  assertU(adoc("id", "3", "test_dt_dv", "2013-02-01T00:00:00Z"));
  assertU(adoc("id", "4", "test_dt_dv", "2014-01-01T00:00:00Z"));
  assertU(adoc("id", "5", "test_dt_dv", "2100-01-01T00:00:00Z"));
  assertU(adoc("id", "6", "test_dt_dv", "2013-01-01T10:00:00Z"));
  assertU(adoc("id", "7", "test_dt_dv", "2013-01-01T00:10:00Z"));
  assertU(adoc("id", "8", "test_dt_dv", "2013-01-01T00:00:10Z"));
  assertU(adoc("id", "9")); // no value: should never be counted
  assertU(commit());
  assertIntervalQuery("test_dt_dv", "[*,*]", "8");
  assertIntervalQuery("test_dt_dv", "[*,2014-01-01T00:00:00Z]", "7");
  // open end excludes the exact 2014-01-01 value
  assertIntervalQuery("test_dt_dv", "[*,2014-01-01T00:00:00Z)", "6");
  // millisecond-precision boundaries around 2014-01-01
  assertIntervalQuery("test_dt_dv", "[*,2014-01-01T00:00:00.001Z)", "7");
  assertIntervalQuery("test_dt_dv", "[*,2013-12-31T23:59:59.999Z]", "6");
  assertIntervalQuery("test_dt_dv", "[2013-12-31T23:59:59.9999Z,2014-01-01T00:00:00.001Z]", "1");
  // only the year-2100 doc is in the future
  assertIntervalQuery("test_dt_dv", "[NOW,*]", "1");
  assertIntervalQuery("test_dt_dv", "[*,NOW]", "7");
  // overwrite doc 5 with the current time and facet with date-math limits
  assertU(adoc("id", "5", "test_dt_dv", "NOW"));
  assertU(commit());
  assertIntervalQuery("test_dt_dv", "[NOW/DAY-1DAY,NOW+2DAY]", "1");
}
/**
 * Interval faceting requires docValues: faceting on the docValues copy of a
 * field works, while faceting directly on the non-docValues source field is
 * rejected with BAD_REQUEST.
 */
@Test
public void testNonDocValueFields() {
  // there is a copyField that will copy to the dv version of the field
  assertU(adoc("id", "1", "test_s", "dog"));
  assertU(adoc("id", "2", "test_s", "cat"));
  assertU(adoc("id", "3", "test_s", "bird"));
  assertU(adoc("id", "4", "test_s", "cat"));
  assertU(commit());
  // faceting on the docValues copy works
  // NOTE(review): "facet.interval" is passed twice for test_s_dv here —
  // looks redundant; confirm it is intentional
  assertQ(req("q", "*:*", "facet", "true", "facet.interval", "test_s_dv",
      "facet.interval", "test_s_dv", "f.test_s_dv.facet.interval.set", "[cat,dog]"),
      "//lst[@name='facet_intervals']/lst[@name='test_s_dv']/int[@name='[cat,dog]'][.=3]");
  // faceting on the source field (no docValues) must fail
  assertQEx("Interval Faceting only on fields with doc values",
      req("q", "*:*", "facet", "true", "facet.interval", "test_s",
          "f.test_s.facet.interval.set", "[cat,dog]"),
      SolrException.ErrorCode.BAD_REQUEST
  );
}
/**
 * Verifies interval facet counts ignore deleted documents: indexes across two
 * commits, deletes by id and by query, then runs the standard string-field
 * interval assertions (which expect only the surviving docs 1-10).
 */
@Test
public void testWithDeletedDocs() {
  assertU(adoc("id", "1", "test_s_dv", "dog"));
  assertU(adoc("id", "2", "test_s_dv", "cat"));
  assertU(adoc("id", "3", "test_s_dv", "bird"));
  assertU(adoc("id", "16", "test_s_dv", "cat")); // deleted by id below
  assertU(adoc("id", "4", "test_s_dv", "turtle"));
  assertU(adoc("id", "5", "test_s_dv", "\\goodbye,"));
  assertU(adoc("id", "6", "test_s_dv", ",hello\\"));
  assertU(adoc("id", "7", "test_s_dv", "dog"));
  assertU(adoc("id", "15", "test_s_dv", "dog")); // deleted by id below
  assertU(adoc("id", "8", "test_s_dv", "dog"));
  assertU(adoc("id", "9", "test_s_dv", "cat"));
  assertU(adoc("id", "10"));
  assertU(commit());
  // second segment: all four docs are removed by the delete-by-query below
  assertU(adoc("id", "11", "test_s_dv", "the"));
  assertU(adoc("id", "12", "test_s_dv", "quick brown"));
  assertU(adoc("id", "13", "test_s_dv", "fox"));
  assertU(adoc("id", "14", "test_s_dv", "jumped over the lazy dog"));
  assertU(commit());
  assertU(delI("16"));
  assertU(delI("15"));
  assertU(delQ("id:[11 TO 14]"));
  assertU(commit());
  assertIntervalQueriesString("test_s_dv");
}
/**
 * Standard interval assertions for the numeric test data set
 * (docs with values 0..8, one doc with no value, one doc with 10).
 */
private void assertIntervalQueriesNumeric(String field) {
  assertIntervalQuery(field, "[0,1]", "2");
  assertIntervalQuery(field, "(0,2)", "1");
  assertIntervalQuery(field, "[0,2)", "2");
  assertIntervalQuery(field, "(0,2]", "2");
  assertIntervalQuery(field, "[*,5]", "6");
  // several intervals in a single request
  assertIntervalQuery(field, "[*,3)", "3", "[2,5)", "3", "[6,8)", "2", "[3,*]", "7", "[10,10]", "1");
}
/**
 * Standard interval assertions for the string test data set
 * (bird, cat x2, dog x3, turtle, the two escaped values, and one doc
 * with no value).
 */
private void assertIntervalQueriesString(String field) {
  assertIntervalQuery(field, "[bird,bird]", "1");
  assertIntervalQuery(field, "(bird,dog)", "2");
  assertIntervalQuery(field, "[bird,dog)", "3");
  assertIntervalQuery(field, "(bird,turtle]", "6");
  assertIntervalQuery(field, "[*,bird]", "3");
  // several intervals in a single request
  assertIntervalQuery(field, "[*,bird)", "2", "[bird,cat)", "1", "[cat,dog)", "2", "[dog,*]", "4");
  assertIntervalQuery(field, "[*,*]", "9", "[*,dog)", "5", "[*,dog]", "8", "[dog,*]", "4");
  // restrict the base query and assert the total hit count too
  assertIntervalQuery(field, field + ":dog", 3, "[*,*]", "3", "[*,dog)", "0", "[*,dog]", "3", "[dog,*]", "3", "[bird,cat]", "0");
}
/**
 * Runs a match-all query and asks for interval facets on the specified field.
 * The interval facets are indicated in the <code>intervals</code> parameter,
 * each followed by its expected count. For example:
 * <code>assertIntervalQuery("my_field", "[0,10]", "3", "(20,*)", "12");</code>
 *
 * @param field The field in which the interval facet should be asked
 * @param intervals a variable array of intervals followed by the expected count (also a string)
 */
private void assertIntervalQuery(String field, String... intervals) {
  // -1 means: don't assert the total number of matching documents
  assertIntervalQuery(field, "*:*", -1, intervals);
}
/**
 * Runs <code>query</code> requesting interval facets on <code>field</code>
 * and asserts the count of each interval.
 *
 * @param field       field to facet on
 * @param query       main query to run
 * @param resultCount if non-negative, the expected total number of hits
 * @param intervals   alternating interval strings and expected counts (both as strings)
 */
private void assertIntervalQuery(String field, String query, int resultCount, String... intervals) {
  assert (intervals.length & 1) == 0 : "intervals must come in (interval, expectedCount) pairs";
  int idx = 0;
  String[] params = new String[intervals.length + 6];
  params[idx++] = "q";
  params[idx++] = query;
  params[idx++] = "facet";
  params[idx++] = "true";
  params[idx++] = "facet.interval";
  params[idx++] = field;
  for (int i = 0; i < intervals.length; i += 2) {
    params[idx++] = "f." + field + ".facet.interval.set";
    params[idx++] = intervals[i];
  }
  // size the array with the same condition used when filling it below
  // (>= 0, not > 0) — otherwise resultCount == 0 would overflow the array
  String[] tests = new String[intervals.length / 2 + (resultCount >= 0 ? 1 : 0)];
  idx = 0;
  for (int i = 0; i < intervals.length; i += 2) {
    tests[idx++] = "//lst[@name='facet_intervals']/lst[@name='" + field + "']/int[@name='" + intervals[i] + "'][.=" + intervals[i + 1] + "]";
  }
  if (resultCount >= 0) {
    tests[idx++] = "//*[@numFound='" + resultCount + "']";
  }
  assertQ("Unexpected facet interval count. Field: " + field + ", Intervals: " + Arrays.toString(intervals) + " Query: " + query,
      req(params), tests);
}
}

View File

@ -254,6 +254,15 @@ public interface FacetParams {
*/
public static final String FACET_RANGE_INCLUDE = FACET_RANGE + ".include";
/**
* Any field whose values the user wants to enumerate as explicit intervals of terms.
*/
public static final String FACET_INTERVAL = FACET + ".interval";
/**
* Set of terms for a single interval to facet on.
*/
public static final String FACET_INTERVAL_SET = FACET_INTERVAL + ".set";
/**
* An enumeration of the legal values for {@link #FACET_RANGE_OTHER} and {@link #FACET_DATE_OTHER} ...

View File

@ -852,6 +852,28 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
unIgnoreException(".");
}
}
/**
 * Makes sure a query throws a SolrException with the listed response code and expected message
 * @param failMessage The assert message to show when the query doesn't throw the expected exception
 * @param exceptionMessage A substring of the message expected in the exception
 * @param req Solr request
 * @param code expected error code for the query
 */
public static void assertQEx(String failMessage, String exceptionMessage, SolrQueryRequest req, SolrException.ErrorCode code ) {
  try {
    // suppress logging of the exception we expect this query to throw
    ignoreException(".");
    h.query(req);
    fail( failMessage );
  } catch (SolrException e) {
    assertEquals( code.code, e.code() );
    assertTrue("Unexpected error message. Expecting \"" + exceptionMessage +
        "\" but got \"" + e.getMessage() + "\"", e.getMessage()!= null && e.getMessage().contains(exceptionMessage));
  } catch (Exception e2) {
    // anything other than SolrException is an unexpected failure, not a pass
    throw new RuntimeException("Exception during query", e2);
  } finally {
    unIgnoreException(".");
  }
}
/**