Made sure `all_terms` works consistently. In some cases the `all_terms` option was ignored:

* Faceting on number-based fields.
* The `execution_hint` was set to `map`.
* The `fields` option was used.
Closes #2861
parent 831ea789aa
commit 9b5c74d43e
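For context, here is a minimal Java-API request that exercises the fixed paths, modelled on the tests added in this commit. The index, facet, and field names are illustrative, and the usual static imports of QueryBuilders.matchAllQuery and FacetBuilders.termsFacet are assumed:

// Terms facet on a numeric field with the "map" execution hint; with this fix,
// allTerms(true) is honored and zero-count terms show up in the facet entries.
SearchResponse response = client.prepareSearch("test")
        .setQuery(matchAllQuery())
        .addFacet(termsFacet("all_longs")
                .field("long")
                .size(10)
                .allTerms(true)
                .executionHint("map"))
        .execute().actionGet();

TermsFacet facet = response.getFacets().facet("all_longs");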
TermsFacetParser.java:

@@ -145,6 +145,11 @@ public class TermsFacetParser extends AbstractComponent implements FacetParser {
             return new IndexNameFacetExecutor(context.shardTarget().index(), comparatorType, size);
         }
 
+        if (fieldsNames != null && fieldsNames.length == 1) {
+            field = fieldsNames[0];
+            fieldsNames = null;
+        }
+
         Pattern pattern = null;
         if (regex != null) {
             pattern = Regex.compile(regex, regexFlags);
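With the parser change above, a `fields` array that contains exactly one entry is collapsed onto the single-field code path, so the two facet definitions below should be handled by the same executor. This is a sketch using the builder calls from SimpleFacetsTests; the facet and field names are illustrative:

// Both definitions take the single-field path after the collapse in TermsFacetParser.
termsFacet("tags").field("tag").size(10).allTerms(true);
termsFacet("tags").fields("tag").size(10).allTerms(true);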
TermsDoubleFacetExecutor.java:

@@ -31,6 +31,7 @@ import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.collect.BoundedTreeSet;
 import org.elasticsearch.index.fielddata.DoubleValues;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
+import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.facet.DoubleFacetAggregatorBase;
 import org.elasticsearch.search.facet.FacetExecutor;
@@ -68,17 +69,42 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
 
         this.facets = CacheRecycler.popDoubleIntMap();
 
-        // TODO: we need to support this with the new field data....
-//        if (allTerms) {
-//            try {
-//                for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
-//                    DoubleFieldData fieldData = (DoubleFieldData) fieldDataCache.cache(fieldDataType, readerContext.reader(), indexFieldName);
-//                    fieldData.forEachValue(aggregator);
-//                }
-//            } catch (Exception e) {
-//                throw new FacetPhaseExecutionException(facetName, "failed to load all terms", e);
-//            }
-//        }
+        if (allTerms) {
+            for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
+                int maxDoc = readerContext.reader().maxDoc();
+                DoubleValues values = indexFieldData.load(readerContext).getDoubleValues();
+                if (values instanceof DoubleValues.WithOrdinals) {
+                    DoubleValues.WithOrdinals valuesWithOrds = (DoubleValues.WithOrdinals) values;
+                    Ordinals.Docs ordinals = valuesWithOrds.ordinals();
+                    for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
+                        facets.putIfAbsent(valuesWithOrds.getValueByOrd(ord), 0);
+                    }
+                } else {
+                    // Shouldn't be true, otherwise it is WithOrdinals... just to be sure...
+                    if (values.isMultiValued()) {
+                        for (int docId = 0; docId < maxDoc; docId++) {
+                            if (!values.hasValue(docId)) {
+                                continue;
+                            }
+
+                            DoubleValues.Iter iter = values.getIter(docId);
+                            while (iter.hasNext()) {
+                                facets.putIfAbsent(iter.next(), 0);
+                            }
+                        }
+                    } else {
+                        for (int docId = 0; docId < maxDoc; docId++) {
+                            if (!values.hasValue(docId)) {
+                                continue;
+                            }
+
+                            double value = values.getValue(docId);
+                            facets.putIfAbsent(value, 0);
+                        }
+                    }
+                }
+            }
+        }
     }
 
     @Override

@@ -146,7 +172,7 @@ public class TermsDoubleFacetExecutor extends FacetExecutor {
 
         @Override
         public void collect(int doc) throws IOException {
             aggregator.onDoc(doc, values);
         }
 
         @Override
TermsLongFacetExecutor.java:

@@ -31,6 +31,7 @@ import org.elasticsearch.common.CacheRecycler;
 import org.elasticsearch.common.collect.BoundedTreeSet;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.index.fielddata.LongValues;
+import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.facet.FacetExecutor;
 import org.elasticsearch.search.facet.InternalFacet;
@@ -65,20 +66,44 @@ public class TermsLongFacetExecutor extends FacetExecutor {
         this.comparatorType = comparatorType;
         this.script = script;
         this.excluded = excluded;
 
         this.facets = CacheRecycler.popLongIntMap();
 
-        // TODO: we need to support this with the new field data....
-//        if (allTerms) {
-//            try {
-//                for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
-//                    LongFieldData fieldData = (LongFieldData) fieldDataCache.cache(fieldDataType, readerContext.reader(), indexFieldName);
-//                    fieldData.forEachValue(aggregator);
-//                }
-//            } catch (Exception e) {
-//                throw new FacetPhaseExecutionException(facetName, "failed to load all terms", e);
-//            }
-//        }
+        if (allTerms) {
+            for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
+                int maxDoc = readerContext.reader().maxDoc();
+                LongValues values = indexFieldData.load(readerContext).getLongValues();
+                if (values instanceof LongValues.WithOrdinals) {
+                    LongValues.WithOrdinals valuesWithOrds = (LongValues.WithOrdinals) values;
+                    Ordinals.Docs ordinals = valuesWithOrds.ordinals();
+                    for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
+                        facets.putIfAbsent(valuesWithOrds.getValueByOrd(ord), 0);
+                    }
+                } else {
+                    // Shouldn't be true, otherwise it is WithOrdinals... just to be sure...
+                    if (values.isMultiValued()) {
+                        for (int docId = 0; docId < maxDoc; docId++) {
+                            if (!values.hasValue(docId)) {
+                                continue;
+                            }
+
+                            LongValues.Iter iter = values.getIter(docId);
+                            while (iter.hasNext()) {
+                                facets.putIfAbsent(iter.next(), 0);
+                            }
+                        }
+                    } else {
+                        for (int docId = 0; docId < maxDoc; docId++) {
+                            if (!values.hasValue(docId)) {
+                                continue;
+                            }
+
+                            long value = values.getValue(docId);
+                            facets.putIfAbsent(value, 0);
+                        }
+                    }
+                }
+            }
+        }
     }
 
     @Override
FieldsTermsStringFacetExecutor.java:

@@ -19,14 +19,10 @@
 
 package org.elasticsearch.search.facet.terms.strings;
 
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.regex.Pattern;
+import com.google.common.collect.ImmutableSet;
 
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.collect.BoundedTreeSet;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.mapper.FieldMapper;

@@ -34,12 +30,10 @@ import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.facet.FacetExecutor;
 import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.InternalFacet;
-import org.elasticsearch.search.facet.terms.strings.HashedAggregator.BytesRefCountIterator;
-import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
 import org.elasticsearch.search.internal.SearchContext;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
+import java.io.IOException;
+import java.util.regex.Pattern;
 
 /**
  *

@@ -54,7 +48,6 @@ public class FieldsTermsStringFacetExecutor extends FacetExecutor {
     long missing;
     long total;
 
-
     public FieldsTermsStringFacetExecutor(String facetName, String[] fieldsNames, int size, InternalStringTermsFacet.ComparatorType comparatorType, boolean allTerms, SearchContext context,
                                           ImmutableSet<BytesRef> excluded, Pattern pattern, SearchScript script) {
         this.size = size;

@@ -74,19 +67,11 @@ public class FieldsTermsStringFacetExecutor extends FacetExecutor {
             aggregator = new HashedScriptAggregator(excluded, pattern, script);
         }
 
-        // TODO: we need to support this flag with the new field data...
-//        if (allTerms) {
-//            try {
-//                for (int i = 0; i < fieldsNames.length; i++) {
-//                    for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
-//                        FieldData fieldData = fieldDataCache.cache(fieldsDataType[i], readerContext.reader(), indexFieldsNames[i]);
-//                        fieldData.forEachValue(aggregator);
-//                    }
-//                }
-//            } catch (Exception e) {
-//                throw new FacetPhaseExecutionException(facetName, "failed to load all terms", e);
-//            }
-//        }
+        if (allTerms) {
+            for (int i = 0; i < fieldsNames.length; i++) {
+                TermsStringFacetExecutor.loadAllTerms(context, indexFieldDatas[i], aggregator);
+            }
+        }
     }
 
     @Override

@@ -107,7 +92,7 @@ public class FieldsTermsStringFacetExecutor extends FacetExecutor {
         public Collector(HashedAggregator aggregator) {
             values = new BytesValues[indexFieldDatas.length];
             this.aggregator = aggregator;
 
         }
 
         @Override
HashedAggregator.java:

@@ -18,8 +18,7 @@
  */
 package org.elasticsearch.search.facet.terms.strings;
 
-import java.util.Arrays;
-
+import com.google.common.collect.ImmutableList;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;

@@ -30,7 +29,7 @@ import org.elasticsearch.search.facet.InternalFacet;
 import org.elasticsearch.search.facet.terms.TermsFacet;
 import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
 
-import com.google.common.collect.ImmutableList;
+import java.util.Arrays;
 
 public class HashedAggregator {
     private int missing;

@@ -41,15 +40,15 @@ public class HashedAggregator {
     public HashedAggregator() {
         this(new BytesRefHash());
     }
 
     public HashedAggregator(BytesRefHash hash) {
         this.hash = hash;
     }
 
     public void onDoc(int docId, BytesValues values) {
         if (values.hasValue(docId)) {
             final Iter iter = values.getIter(docId);
-            while(iter.hasNext()) {
+            while (iter.hasNext()) {
                 onValue(docId, iter.next(), iter.hash(), values);
                 total++;
             }

@@ -57,15 +56,25 @@ public class HashedAggregator {
             missing++;
         }
     }
 
+    public void addValue(BytesRef value, int hashCode) {
+        int key = hash.add(value, hashCode);
+        if (key < 0) {
+            key = ((-key) - 1);
+        }
+        if (key >= counts.length) {
+            counts = ArrayUtil.grow(counts, key + 1);
+        }
+    }
+
     protected BytesRef makesSafe(BytesRef ref, BytesValues values) {
         return values.makeSafe(ref);
     }
 
     protected void onValue(int docId, BytesRef value, int hashCode, BytesValues values) {
         int key = hash.add(value, hashCode);
         if (key < 0) {
-            key = ((-key)-1);
+            key = ((-key) - 1);
         } else if (key >= counts.length) {
             counts = ArrayUtil.grow(counts, key + 1);
         }

@@ -79,25 +88,26 @@ public class HashedAggregator {
     public final int total() {
         return total;
     }
 
     public final boolean isEmpty() {
         return hash.size() == 0;
     }
 
     public BytesRefCountIterator getIter() {
         return new BytesRefCountIterator();
     }
 
+
     public final class BytesRefCountIterator {
         final BytesRef spare = new BytesRef();
         private final int size;
         private int current = 0;
         private int currentCount = -1;
 
         BytesRefCountIterator() {
             this.size = hash.size();
         }
 
         public BytesRef next() {
             if (current < size) {
                 currentCount = counts[current];

@@ -112,8 +122,8 @@ public class HashedAggregator {
             return currentCount;
         }
     }
 
     public static InternalFacet buildFacet(String facetName, int size, long missing, long total, TermsFacet.ComparatorType comparatorType, HashedAggregator aggregator) {
         if (aggregator.isEmpty()) {
             return new InternalStringTermsFacet(facetName, comparatorType, size, ImmutableList.<InternalStringTermsFacet.TermEntry>of(), missing, total);
         } else {

@@ -121,7 +131,7 @@ public class HashedAggregator {
             EntryPriorityQueue ordered = new EntryPriorityQueue(size, comparatorType.comparator());
             BytesRefCountIterator iter = aggregator.getIter();
             BytesRef next = null;
-            while((next = iter.next()) != null) {
+            while ((next = iter.next()) != null) {
                 ordered.insertWithOverflow(new InternalStringTermsFacet.TermEntry(BytesRef.deepCopyOf(next), iter.count()));
                 // maybe we can survive with a 0-copy here if we keep the bytes ref hash around?
             }

@@ -134,7 +144,7 @@ public class HashedAggregator {
             BoundedTreeSet<InternalStringTermsFacet.TermEntry> ordered = new BoundedTreeSet<InternalStringTermsFacet.TermEntry>(comparatorType.comparator(), size);
             BytesRefCountIterator iter = aggregator.getIter();
             BytesRef next = null;
-            while((next = iter.next()) != null) {
+            while ((next = iter.next()) != null) {
                 ordered.add(new InternalStringTermsFacet.TermEntry(BytesRef.deepCopyOf(next), iter.count()));
                 // maybe we can survive with a 0-copy here if we keep the bytes ref hash around?
             }
TermsStringFacetExecutor.java:

@@ -19,32 +19,21 @@
 
 package org.elasticsearch.search.facet.terms.strings;
 
-import gnu.trove.iterator.TObjectIntIterator;
-import gnu.trove.map.hash.TObjectIntHashMap;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.regex.Pattern;
+import com.google.common.collect.ImmutableSet;
 
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.common.CacheRecycler;
-import org.elasticsearch.common.collect.BoundedTreeSet;
-import org.elasticsearch.common.lucene.HashedBytesRef;
 import org.elasticsearch.index.fielddata.BytesValues;
 import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.facet.FacetExecutor;
-import org.elasticsearch.search.facet.FacetPhaseExecutionException;
 import org.elasticsearch.search.facet.InternalFacet;
 import org.elasticsearch.search.facet.terms.TermsFacet;
-import org.elasticsearch.search.facet.terms.strings.HashedAggregator.BytesRefCountIterator;
-import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
 import org.elasticsearch.search.internal.SearchContext;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
+import java.io.IOException;
+import java.util.regex.Pattern;
 
 /**
  *

@@ -70,12 +59,16 @@ public class TermsStringFacetExecutor extends FacetExecutor {
         this.comparatorType = comparatorType;
         this.script = script;
         this.allTerms = allTerms;
 
         if (excluded.isEmpty() && pattern == null && script == null) {
             aggregator = new HashedAggregator();
         } else {
             aggregator = new HashedScriptAggregator(excluded, pattern, script);
         }
+
+        if (allTerms) {
+            loadAllTerms(context, indexFieldData, aggregator);
+        }
     }
 
     @Override

@@ -125,5 +118,45 @@ public class TermsStringFacetExecutor extends FacetExecutor {
             TermsStringFacetExecutor.this.total = aggregator.total();
         }
     }
 
+    static void loadAllTerms(SearchContext context, IndexFieldData indexFieldData, HashedAggregator aggregator) {
+        for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
+            int maxDoc = readerContext.reader().maxDoc();
+            if (indexFieldData instanceof IndexFieldData.WithOrdinals) {
+                BytesValues.WithOrdinals values = ((IndexFieldData.WithOrdinals) indexFieldData).load(readerContext).getBytesValues();
+                Ordinals.Docs ordinals = values.ordinals();
+                // 0 = docs with no value for field, so start from 1 instead
+                for (int ord = 1; ord < ordinals.getMaxOrd(); ord++) {
+                    BytesRef value = values.getValueByOrd(ord);
+                    aggregator.addValue(value, value.hashCode());
+                }
+            } else {
+                BytesValues values = indexFieldData.load(readerContext).getBytesValues();
+                // Shouldn't be true, otherwise it is WithOrdinals... just to be sure...
+                if (values.isMultiValued()) {
+                    for (int docId = 0; docId < maxDoc; docId++) {
+                        if (!values.hasValue(docId)) {
+                            continue;
+                        }
+
+                        BytesValues.Iter iter = values.getIter(docId);
+                        while (iter.hasNext()) {
+                            aggregator.addValue(iter.next(), iter.hash());
+                        }
+                    }
+                } else {
+                    BytesRef spare = new BytesRef();
+                    for (int docId = 0; docId < maxDoc; docId++) {
+                        if (!values.hasValue(docId)) {
+                            continue;
+                        }
+
+                        int hash = values.getValueHashed(docId, spare);
+                        aggregator.addValue(spare, hash);
+                    }
+                }
+            }
+        }
+    }
+
 }
SimpleFacetsTests.java:

@@ -148,7 +148,7 @@ public class SimpleFacetsTests extends AbstractNodesTests {
             assertThat(facet.getEntries().get(1).getCount(), equalTo(1));
         }
     }
 
     @Test
     public void testFacetNumeric() throws ElasticSearchException, IOException {
         // TODO we should test this with more complex queries

@@ -157,7 +157,7 @@ public class SimpleFacetsTests extends AbstractNodesTests {
         } catch (Exception e) {
             // ignore
         }
 
         client.admin().indices().prepareCreate("test")
                 .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
                         .startObject("byte").field("type", "byte").endObject()

@@ -182,16 +182,16 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                     .field("double", (double)i)
                     .endObject()).execute().actionGet();
         }
 
         for (int i = 0; i < 10; i++) {
             client.prepareIndex("test", "type", ""+(i + 100)).setSource(jsonBuilder().startObject()
                     .field("foo", ""+i)
                     .endObject()).execute().actionGet();
         }
 
         String[] execHint = new String[] {"map", null};
         for (String hint : execHint) {
 
             client.admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
             SearchResponse searchResponse = client.prepareSearch()
                     .setQuery(matchAllQuery())

@@ -205,9 +205,9 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                     .addFacet(termsFacet("termFacetRegex").executionHint(hint).field("multiValued").regex("9\\d").size(20))
                     .addFacet(termsFacet("termFacetScript").executionHint(hint).field("multiValued").script("Integer.toHexString(Integer.parseInt(term))").size(10))
                     .addFacet(termsFacet("termFacetScriptRegex").executionHint(hint).field("multiValued").script("Integer.toHexString(Integer.parseInt(term))").regex("9\\d").size(20))
 
                     .execute().actionGet();
 
             assertThat(searchResponse.getHits().getTotalHits(), equalTo(110l));
             TermsFacet facet = searchResponse.getFacets().facet("termFacet");
             assertThat(facet.getName(), equalTo("termFacet"));
@@ -215,85 +215,85 @@ public class SimpleFacetsTests extends AbstractNodesTests {
             assertThat(facet.getTotalCount(), equalTo(100l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             facet = searchResponse.getFacets().facet("termFacetRegex");
             assertThat(facet.getName(), equalTo("termFacetRegex"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(190l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             int count = 99;
             for (Entry entry : facet) {
                 assertThat(Integer.parseInt(entry.getTerm().string()), equalTo(count--));
                 assertThat(entry.getCount(), equalTo(10));
             }
 
             facet = searchResponse.getFacets().facet("termFacetScriptRegex");
             assertThat(facet.getName(), equalTo("termFacetScriptRegex"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(190l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             count = 99;
             for (Entry entry : facet) {
                 assertThat(entry.getTerm().string(), equalTo(Integer.toHexString(count--)));
                 assertThat(entry.getCount(), equalTo(10));
             }
 
             facet = searchResponse.getFacets().facet("termFacetScript");
             assertThat(facet.getName(), equalTo("termFacetScript"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(190l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             count = 99;
             for (Entry entry : facet) {
                 assertThat(entry.getTerm().string(), equalTo(Integer.toHexString(count--)));
                 assertThat(entry.getCount(), equalTo(10));
             }
 
             facet = searchResponse.getFacets().facet("double");
             assertThat(facet.getName(), equalTo("double"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(100l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             facet = searchResponse.getFacets().facet("float");
             assertThat(facet.getName(), equalTo("float"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(100l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             facet = searchResponse.getFacets().facet("long");
             assertThat(facet.getName(), equalTo("long"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(100l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             facet = searchResponse.getFacets().facet("integer");
             assertThat(facet.getName(), equalTo("integer"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(100l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
 
             facet = searchResponse.getFacets().facet("short");
             assertThat(facet.getName(), equalTo("short"));
             assertThat(facet.getEntries().size(), equalTo(10));
             assertThat(facet.getTotalCount(), equalTo(100l));
             assertThat(facet.getOtherCount(), equalTo(90l));
             assertThat(facet.getMissingCount(), equalTo(10l));
         }
 
     }
 
 
     @Test
     public void testConcurrentFacets() throws ElasticSearchException, IOException, InterruptedException, ExecutionException {
         try {
@@ -301,7 +301,7 @@ public class SimpleFacetsTests extends AbstractNodesTests {
         } catch (Exception e) {
             // ignore
         }
 
         client.admin().indices().prepareCreate("test")
                 .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
                         .startObject("byte").field("type", "byte").endObject()

@@ -325,20 +325,20 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                     .field("double", (double)i)
                     .endObject()).execute().actionGet();
         }
 
         for (int i = 0; i < 10; i++) {
             client.prepareIndex("test", "type", ""+(i + 100)).setSource(jsonBuilder().startObject()
                     .field("foo", ""+i)
                     .endObject()).execute().actionGet();
         }
 
         client.admin().indices().prepareFlush().setRefresh(true).execute().actionGet();
         ConcurrentDuel<Facets> duel = new ConcurrentDuel<Facets>(5);
         {
             final Client cl = client;
 
             duel.duel(new ConcurrentDuel.DuelJudge<Facets>() {
 
                 @Override
                 public void judge(Facets firstRun, Facets result) {
                     for (Facet f : result) {

@@ -346,15 +346,15 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                         assertThat(facet.getName(), isIn(new String[] {"short", "double", "byte", "float", "integer", "long", "termFacet"}));
                         TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
                         assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
 
                         assertThat(facet.getEntries().size(), equalTo(10));
                         assertThat(facet.getTotalCount(), equalTo(100l));
                         assertThat(facet.getOtherCount(), equalTo(90l));
                         assertThat(facet.getMissingCount(), equalTo(10l));
 
                         List<? extends Entry> right = facet.getEntries();
                         List<? extends Entry> left = firstRunFacet.getEntries();
 
                         for (int i = 0; i < facet.getEntries().size(); i++) {
                             assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
                             assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));

@@ -386,7 +386,7 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                                 .addFacet(termsFacet("byte").executionHint("map").field("byte").size(10))
                                 .addFacet(termsFacet("termFacet").executionHint("map").field("name").size(10));
                     }
 
                     SearchResponse actionGet = facetRequest.execute().actionGet();
                     return actionGet.getFacets();
                 }

@@ -394,7 +394,7 @@ public class SimpleFacetsTests extends AbstractNodesTests {
         }
         {
             duel.duel(new ConcurrentDuel.DuelJudge<Facets>() {
 
                 @Override
                 public void judge(Facets firstRun, Facets result) {
                     for (Facet f : result) {

@@ -402,19 +402,19 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                         assertThat(facet.getName(), equalTo("termFacet"));
                         TermsFacet firstRunFacet = (TermsFacet) firstRun.getFacets().get(facet.getName());
                         assertThat(facet.getEntries().size(), equalTo(firstRunFacet.getEntries().size()));
 
                         assertThat(facet.getEntries().size(), equalTo(10));
                         assertThat(facet.getTotalCount(), equalTo(100l));
                         assertThat(facet.getOtherCount(), equalTo(90l));
                         assertThat(facet.getMissingCount(), equalTo(10l));
 
                         List<? extends Entry> right = facet.getEntries();
                         List<? extends Entry> left = firstRunFacet.getEntries();
 
                         for (int i = 0; i < facet.getEntries().size(); i++) {
                             assertThat(left.get(i).getTerm(), equalTo(right.get(i).getTerm()));
                             assertThat(left.get(i).getCount(), equalTo(right.get(i).getCount()));
                         }
                     }
                 }
             }, new ConcurrentDuel.DuelExecutor<Facets>() {

@@ -459,10 +459,10 @@ public class SimpleFacetsTests extends AbstractNodesTests {
                 }
             }, 5000);
         }
 
         duel.close();
     }
 
     @Test
     public void testSearchFilter() throws Exception {
         try {
@@ -1061,21 +1061,67 @@ public class SimpleFacetsTests extends AbstractNodesTests {
             assertThat(facet.getEntries().get(3).getTerm().string(), anyOf(equalTo("zzz"), equalTo("xxx")));
             assertThat(facet.getEntries().get(3).getCount(), equalTo(1));
 
-            // TODO: support allTerms with the new field data
-//            searchResponse = client.prepareSearch()
-//                    .setQuery(termQuery("xxx", "yyy")) // don't match anything
-//                    .addFacet(termsFacet("facet1").field("tag").size(10).allTerms(true).executionHint(executionHint))
-//                    .execute().actionGet();
-//
-//            facet = searchResponse.facets().facet("facet1");
-//            assertThat(facet.getName(), equalTo("facet1"));
-//            assertThat(facet.getEntries().size(), equalTo(3));
-//            assertThat(facet.getEntries().get(0).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz")));
-//            assertThat(facet.getEntries().get(0).getCount(), equalTo(0));
-//            assertThat(facet.getEntries().get(1).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz")));
-//            assertThat(facet.getEntries().get(1).getCount(), equalTo(0));
-//            assertThat(facet.getEntries().get(2).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz")));
-//            assertThat(facet.getEntries().get(2).getCount(), equalTo(0));
+            searchResponse = client.prepareSearch()
+                    .setQuery(termQuery("xxx", "yyy")) // don't match anything
+                    .addFacet(termsFacet("facet1").field("tag").size(10).allTerms(true).executionHint(executionHint))
+                    .execute().actionGet();
+
+            facet = searchResponse.getFacets().facet("facet1");
+            assertThat(facet.getName(), equalTo("facet1"));
+            assertThat(facet.getEntries().size(), equalTo(3));
+            assertThat(facet.getEntries().get(0).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz")));
+            assertThat(facet.getEntries().get(0).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(1).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz")));
+            assertThat(facet.getEntries().get(1).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(2).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz")));
+            assertThat(facet.getEntries().get(2).getCount(), equalTo(0));
+
+            searchResponse = client.prepareSearch()
+                    .setQuery(termQuery("xxx", "yyy")) // don't match anything
+                    .addFacet(termsFacet("facet1").fields("tag", "stag").size(10).allTerms(true).executionHint(executionHint))
+                    .execute().actionGet();
+
+            facet = searchResponse.getFacets().facet("facet1");
+            assertThat(facet.getName(), equalTo("facet1"));
+            assertThat(facet.getEntries().size(), equalTo(4));
+            assertThat(facet.getEntries().get(0).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz"), equalTo("111")));
+            assertThat(facet.getEntries().get(0).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(1).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz"), equalTo("111")));
+            assertThat(facet.getEntries().get(1).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(2).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz"), equalTo("111")));
+            assertThat(facet.getEntries().get(2).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(3).getTerm().string(), anyOf(equalTo("xxx"), equalTo("yyy"), equalTo("zzz"), equalTo("111")));
+            assertThat(facet.getEntries().get(3).getCount(), equalTo(0));
+
+            searchResponse = client.prepareSearch()
+                    .setQuery(termQuery("xxx", "yyy")) // don't match anything
+                    .addFacet(termsFacet("facet1").field("ltag").size(10).allTerms(true).executionHint(executionHint))
+                    .execute().actionGet();
+
+            facet = searchResponse.getFacets().facet("facet1");
+            assertThat(facet.getName(), equalTo("facet1"));
+            assertThat(facet.getEntries().size(), equalTo(3));
+            assertThat(facet.getEntries().get(0).getTermAsNumber().intValue(), anyOf(equalTo(1000), equalTo(2000), equalTo(3000)));
+            assertThat(facet.getEntries().get(0).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(1).getTermAsNumber().intValue(), anyOf(equalTo(1000), equalTo(2000), equalTo(3000)));
+            assertThat(facet.getEntries().get(1).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(2).getTermAsNumber().intValue(), anyOf(equalTo(1000), equalTo(2000), equalTo(3000)));
+            assertThat(facet.getEntries().get(2).getCount(), equalTo(0));
+
+            searchResponse = client.prepareSearch()
+                    .setQuery(termQuery("xxx", "yyy")) // don't match anything
+                    .addFacet(termsFacet("facet1").field("dtag").size(10).allTerms(true).executionHint(executionHint))
+                    .execute().actionGet();
+
+            facet = searchResponse.getFacets().facet("facet1");
+            assertThat(facet.getName(), equalTo("facet1"));
+            assertThat(facet.getEntries().size(), equalTo(3));
+            assertThat(facet.getEntries().get(0).getTermAsNumber().doubleValue(), anyOf(equalTo(1000.1), equalTo(2000.1), equalTo(3000.1)));
+            assertThat(facet.getEntries().get(0).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(1).getTermAsNumber().doubleValue(), anyOf(equalTo(1000.1), equalTo(2000.1), equalTo(3000.1)));
+            assertThat(facet.getEntries().get(1).getCount(), equalTo(0));
+            assertThat(facet.getEntries().get(2).getTermAsNumber().doubleValue(), anyOf(equalTo(1000.1), equalTo(2000.1), equalTo(3000.1)));
+            assertThat(facet.getEntries().get(2).getCount(), equalTo(0));
 
             // Script Field