mirror of https://github.com/apache/druid.git
Merge pull request #2260 from navis/cardinality-for-searchquery
Support cardinality for search query
commit dbdbacaa18
@@ -39,6 +39,7 @@ There are several main parts to a search query:
 |searchDimensions|The dimensions to run the search over. Excluding this means the search is run over all dimensions.|no|
 |query|See [SearchQuerySpec](../querying/searchqueryspec.html).|yes|
 |sort|An object specifying how the results of the search should be sorted. Two possible types here are "lexicographic" (the default sort) and "strlen".|no|
+|computeCount|Include appearance count of each value in result. False by default.|no|
 |context|See [Context](../querying/query-context.html)|no|

 The format of the result is:
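For a concrete picture of what the patch adds on the Java side, here is a minimal sketch that combines the new builder method and the new count accessor. It mirrors the tests further down; the QueryRunner, the QueryRunnerTestHelper constants and the usual imports are assumed to be wired up elsewhere, so treat it as illustrative rather than a complete program.

// Illustrative sketch only: build a search query with the new sortSpec() builder method
// and read the per-value appearance count that SearchHit now carries.
SearchQuery query = Druids.newSearchQueryBuilder()
                          .dataSource(QueryRunnerTestHelper.dataSource)
                          .granularity(QueryRunnerTestHelper.allGran)
                          .intervals(QueryRunnerTestHelper.fullOnInterval)
                          .sortSpec(new StrlenSearchSortSpec())   // added by this patch
                          .query("a")
                          .build();

List<Result<SearchResultValue>> results = Sequences.toList(
    runner.run(query, ImmutableMap.<String, Object>of()),         // runner assumed to exist
    Lists.<Result<SearchResultValue>>newArrayList()
);
for (Result<SearchResultValue> result : results) {
  for (SearchHit hit : result.getValue()) {
    // getCount() is new; it is null when counts were not computed for the hit
    Integer count = hit.getCount();
    System.out.println(hit.getDimension() + "=" + hit.getValue()
                       + (count == null ? "" : " (" + count + ")"));
  }
}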
@@ -44,6 +44,7 @@ import io.druid.query.search.search.FragmentSearchQuerySpec;
 import io.druid.query.search.search.InsensitiveContainsSearchQuerySpec;
 import io.druid.query.search.search.SearchQuery;
 import io.druid.query.search.search.SearchQuerySpec;
+import io.druid.query.search.search.SearchSortSpec;
 import io.druid.query.select.PagingSpec;
 import io.druid.query.select.SelectQuery;
 import io.druid.query.spec.LegacySegmentSpec;

@@ -547,6 +548,7 @@ public class Druids
     private QuerySegmentSpec querySegmentSpec;
     private List<DimensionSpec> dimensions;
     private SearchQuerySpec querySpec;
+    private SearchSortSpec sortSpec;
     private Map<String, Object> context;

     public SearchQueryBuilder()

@@ -571,7 +573,7 @@ public class Druids
           querySegmentSpec,
           dimensions,
           querySpec,
-          null,
+          sortSpec,
           context
       );
     }

@@ -735,6 +737,12 @@ public class Druids
       return fragments(q, false);
     }

+    public SearchQueryBuilder sortSpec(SearchSortSpec sortSpec)
+    {
+      this.sortSpec = sortSpec;
+      return this;
+    }
+
     public SearchQueryBuilder fragments(List<String> q, boolean caseSensitive)
     {
       Preconditions.checkNotNull(q, "no value");
@@ -21,15 +21,16 @@ package io.druid.query.search;

 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import com.metamx.common.guava.nary.BinaryFn;
 import io.druid.granularity.AllGranularity;
 import io.druid.granularity.QueryGranularity;
 import io.druid.query.Result;
 import io.druid.query.search.search.SearchHit;
 import io.druid.query.search.search.SearchSortSpec;
+import org.joda.time.DateTime;

-import java.util.TreeSet;
+import java.util.Arrays;
+import java.util.List;

 /**
 */

@@ -62,24 +63,53 @@ public class SearchBinaryFn
       return arg1;
     }

+    final int limit = gran instanceof AllGranularity ? this.limit : -1;
+
     SearchResultValue arg1Vals = arg1.getValue();
     SearchResultValue arg2Vals = arg2.getValue();

-    TreeSet<SearchHit> results = Sets.newTreeSet(searchSortSpec.getComparator());
-    results.addAll(Lists.newArrayList(arg1Vals));
-    results.addAll(Lists.newArrayList(arg2Vals));
-
-    return (gran instanceof AllGranularity)
-           ? new Result<SearchResultValue>(
-        arg1.getTimestamp(), new SearchResultValue(
-            Lists.newArrayList(
-                Iterables.limit(results, limit)
-            )
-        )
-    )
-           : new Result<SearchResultValue>(
-        gran.toDateTime(gran.truncate(arg1.getTimestamp().getMillis())),
-        new SearchResultValue(Lists.newArrayList(results))
-    );
+    Iterable<SearchHit> merged = Iterables.mergeSorted(
+        Arrays.asList(arg1Vals, arg2Vals),
+        searchSortSpec.getComparator()
+    );
+
+    int maxSize = arg1Vals.getValue().size() + arg2Vals.getValue().size();
+    if (limit > 0) {
+      maxSize = Math.min(limit, maxSize);
+    }
+    List<SearchHit> results = Lists.newArrayListWithExpectedSize(maxSize);
+
+    SearchHit prev = null;
+    for (SearchHit searchHit : merged) {
+      if (prev == null) {
+        prev = searchHit;
+        continue;
+      }
+      if (prev.equals(searchHit)) {
+        if (prev.getCount() != null) {
+          prev = new SearchHit(
+              prev.getDimension(),
+              prev.getValue(),
+              prev.getCount() + searchHit.getCount()
+          );
+        }
+      } else {
+        results.add(prev);
+        prev = searchHit;
+        if (limit > 0 && results.size() >= limit) {
+          break;
+        }
+      }
+    }
+
+    if (prev != null && (limit < 0 || results.size() < limit)) {
+      results.add(prev);
+    }
+
+    final DateTime timestamp = gran instanceof AllGranularity
+                               ? arg1.getTimestamp()
+                               : gran.toDateTime(gran.truncate(arg1.getTimestamp().getMillis()));
+
+    return new Result<SearchResultValue>(timestamp, new SearchResultValue(results));
   }
 }
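A note on the rewritten merge above: it relies on Guava's Iterables.mergeSorted, which lazily interleaves inputs that are already sorted by the same comparator, so equal hits from the two partial results arrive next to each other and their counts can be summed in one pass instead of re-sorting everything through a TreeSet. A toy, self-contained sketch of that contract (plain integers and natural ordering, not Druid code):

import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;

import java.util.Arrays;
import java.util.List;

public class MergeSortedDemo
{
  public static void main(String[] args)
  {
    // Both inputs must already be sorted by the comparator handed to mergeSorted,
    // which is why SearchBinaryFn reuses searchSortSpec.getComparator().
    List<Integer> a = Arrays.asList(1, 3, 5);
    List<Integer> b = Arrays.asList(2, 3, 6);
    for (Integer value : Iterables.mergeSorted(Arrays.asList(a, b), Ordering.<Integer>natural())) {
      System.out.print(value + " ");   // prints 1 2 3 3 5 6 -- the duplicate 3s come out adjacent
    }
  }
}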
@@ -157,16 +157,20 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
         ++index;
       }

+      final byte[] sortSpecBytes = query.getSort().getCacheKey();
+
       final ByteBuffer queryCacheKey = ByteBuffer
           .allocate(
               1 + 4 + granularityBytes.length + filterBytes.length +
-              querySpecBytes.length + dimensionsBytesSize
+              querySpecBytes.length + dimensionsBytesSize + sortSpecBytes.length
           )
           .put(SEARCH_QUERY)
           .put(Ints.toByteArray(query.getLimit()))
           .put(granularityBytes)
           .put(filterBytes)
-          .put(querySpecBytes);
+          .put(querySpecBytes)
+          .put(sortSpecBytes)
+          ;

       for (byte[] bytes : dimensionsBytes) {
         queryCacheKey.put(bytes);
@@ -19,12 +19,12 @@

 package io.druid.query.search;

+import com.google.common.base.Function;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.metamx.collections.bitmap.BitmapFactory;
 import com.metamx.collections.bitmap.ImmutableBitmap;
 import com.metamx.common.ISE;

@@ -55,11 +55,12 @@ import io.druid.segment.column.BitmapIndex;
 import io.druid.segment.column.Column;
 import io.druid.segment.data.IndexedInts;
 import io.druid.segment.filter.Filters;
+import org.apache.commons.lang.mutable.MutableInt;

 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeSet;
+import java.util.TreeMap;

 /**
 */

@@ -94,7 +95,7 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
     final QueryableIndex index = segment.asQueryableIndex();

     if (index != null) {
-      final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());
+      final TreeMap<SearchHit, MutableInt> retVal = Maps.newTreeMap(query.getSort().getComparator());

       Iterable<DimensionSpec> dimsToSearch;
       if (dimensions == null || dimensions.isEmpty()) {

@@ -105,13 +106,8 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>

       final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();

-      final ImmutableBitmap baseFilter;
-      if (filter == null) {
-        baseFilter = bitmapFactory.complement(bitmapFactory.makeEmptyImmutableBitmap(), index.getNumRows());
-      } else {
-        final ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(bitmapFactory, index);
-        baseFilter = filter.getBitmapIndex(selector);
-      }
+      final ImmutableBitmap baseFilter =
+          filter == null ? null : filter.getBitmapIndex(new ColumnSelectorBitmapIndexSelector(bitmapFactory, index));

       for (DimensionSpec dimension : dimsToSearch) {
         final Column column = index.getColumn(dimension.getDimension());

@@ -127,9 +123,19 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
         if (bitmapIndex != null) {
           for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
             String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
-            if (searchQuerySpec.accept(dimVal) &&
-                bitmapFactory.intersection(Arrays.asList(baseFilter, bitmapIndex.getBitmap(i))).size() > 0) {
-              retVal.add(new SearchHit(dimension.getOutputName(), dimVal));
+            if (!searchQuerySpec.accept(dimVal)) {
+              continue;
+            }
+            ImmutableBitmap bitmap = bitmapIndex.getBitmap(i);
+            if (baseFilter != null) {
+              bitmap = bitmapFactory.intersection(Arrays.asList(baseFilter, bitmap));
+            }
+            if (bitmap.size() > 0) {
+              MutableInt counter = new MutableInt(bitmap.size());
+              MutableInt prev = retVal.put(new SearchHit(dimension.getOutputName(), dimVal), counter);
+              if (prev != null) {
+                counter.add(prev.intValue());
+              }
               if (retVal.size() >= limit) {
                 return makeReturnResult(limit, retVal);
               }

@@ -161,12 +167,12 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>

     final Sequence<Cursor> cursors = adapter.makeCursors(filter, segment.getDataInterval(), QueryGranularity.ALL, descending);

-    final TreeSet<SearchHit> retVal = cursors.accumulate(
-        Sets.newTreeSet(query.getSort().getComparator()),
-        new Accumulator<TreeSet<SearchHit>, Cursor>()
+    final TreeMap<SearchHit, MutableInt> retVal = cursors.accumulate(
+        Maps.<SearchHit, SearchHit, MutableInt>newTreeMap(query.getSort().getComparator()),
+        new Accumulator<TreeMap<SearchHit, MutableInt>, Cursor>()
         {
           @Override
-          public TreeSet<SearchHit> accumulate(TreeSet<SearchHit> set, Cursor cursor)
+          public TreeMap<SearchHit, MutableInt> accumulate(TreeMap<SearchHit, MutableInt> set, Cursor cursor)
           {
             if (set.size() >= limit) {
               return set;

@@ -189,7 +195,11 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
             for (int i = 0; i < vals.size(); ++i) {
               final String dimVal = selector.lookupName(vals.get(i));
               if (searchQuerySpec.accept(dimVal)) {
-                set.add(new SearchHit(entry.getKey(), dimVal));
+                MutableInt counter = new MutableInt(1);
+                MutableInt prev = set.put(new SearchHit(entry.getKey(), dimVal), counter);
+                if (prev != null) {
+                  counter.add(prev.intValue());
+                }
                 if (set.size() >= limit) {
                   return set;
                 }

@@ -209,14 +219,26 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
     return makeReturnResult(limit, retVal);
   }

-  private Sequence<Result<SearchResultValue>> makeReturnResult(int limit, TreeSet<SearchHit> retVal)
+  private Sequence<Result<SearchResultValue>> makeReturnResult(
+      int limit, TreeMap<SearchHit, MutableInt> retVal)
   {
+    Iterable<SearchHit> source = Iterables.transform(
+        retVal.entrySet(), new Function<Map.Entry<SearchHit, MutableInt>, SearchHit>()
+        {
+          @Override
+          public SearchHit apply(Map.Entry<SearchHit, MutableInt> input)
+          {
+            SearchHit hit = input.getKey();
+            return new SearchHit(hit.getDimension(), hit.getValue(), input.getValue().intValue());
+          }
+        }
+    );
     return Sequences.simple(
         ImmutableList.of(
             new Result<SearchResultValue>(
                 segment.getDataInterval().getStart(),
                 new SearchResultValue(
-                    Lists.newArrayList(new FunctionalIterable<SearchHit>(retVal).limit(limit))
+                    Lists.newArrayList(new FunctionalIterable<SearchHit>(source).limit(limit))
                 )
             )
         )
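The per-segment accumulation above collects hits in a TreeMap<SearchHit, MutableInt> ordered by the sort spec's comparator, and the put-then-add idiom depends on TreeMap.put returning the previous counter for an equal key. A standalone illustration of the same idiom with plain strings (hypothetical demo class, not part of the patch):

import org.apache.commons.lang.mutable.MutableInt;

import java.util.Map;
import java.util.TreeMap;

public class PutThenAddDemo
{
  public static void main(String[] args)
  {
    // Count word occurrences: put a fresh counter, then fold in whatever was there before.
    Map<String, MutableInt> counts = new TreeMap<>();
    for (String word : new String[]{"a", "b", "a", "a"}) {
      MutableInt counter = new MutableInt(1);
      MutableInt prev = counts.put(word, counter);
      if (prev != null) {
        counter.add(prev.intValue());
      }
    }
    System.out.println(counts); // {a=3, b=1}
  }
}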
@@ -50,6 +50,12 @@ public class LexicographicSearchSortSpec implements SearchSortSpec
     };
   }

+  @Override
+  public byte[] getCacheKey()
+  {
+    return toString().getBytes();
+  }
+
   public String toString()
   {
     return "lexicographicSort";
@@ -30,15 +30,23 @@ public class SearchHit implements Comparable<SearchHit>
 {
   private final String dimension;
   private final String value;
+  private final Integer count;

   @JsonCreator
   public SearchHit(
       @JsonProperty("dimension") String dimension,
-      @JsonProperty("value") String value
+      @JsonProperty("value") String value,
+      @JsonProperty("count") Integer count
   )
   {
     this.dimension = checkNotNull(dimension);
     this.value = checkNotNull(value);
+    this.count = count;
+  }
+
+  public SearchHit(String dimension, String value)
+  {
+    this(dimension, value, null);
   }

   @JsonProperty

@@ -53,6 +61,12 @@ public class SearchHit implements Comparable<SearchHit>
     return value;
   }

+  @JsonProperty
+  public Integer getCount()
+  {
+    return count;
+  }
+
   @Override
   public int compareTo(SearchHit o)
   {

@@ -99,6 +113,7 @@ public class SearchHit implements Comparable<SearchHit>
     return "Hit{" +
            "dimension='" + dimension + '\'' +
            ", value='" + value + '\'' +
+           (count != null ? ", count='" + count + '\'' : "") +
            '}';
   }
 }
@@ -33,5 +33,7 @@ import java.util.Comparator;
 })
 public interface SearchSortSpec
 {
-  public Comparator<SearchHit> getComparator();
+  Comparator<SearchHit> getComparator();
+
+  byte[] getCacheKey();
 }
@@ -52,6 +52,12 @@ public class StrlenSearchSortSpec implements SearchSortSpec
     };
   }

+  @Override
+  public byte[] getCacheKey()
+  {
+    return toString().getBytes();
+  }
+
   public String toString()
   {
     return "stringLengthSort";
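Both bundled sort specs implement the new getCacheKey() by reusing their toString() bytes, and SearchQueryQueryToolChest now folds those bytes into the query cache key, so a result cached under one sort order is not reused for a query that asked for the other. A quick illustrative check under that assumption (not code from the patch):

// Hypothetical snippet: the two sort specs shipped with Druid must produce different cache keys.
byte[] lexKey = new LexicographicSearchSortSpec().getCacheKey();   // "lexicographicSort".getBytes()
byte[] strlenKey = new StrlenSearchSortSpec().getCacheKey();       // "stringLengthSort".getBytes()
assert !java.util.Arrays.equals(lexKey, strlenKey);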
@@ -102,6 +102,8 @@ public class QueryRunnerTestHelper
   public static final String qualityDimension = "quality";
   public static final String placementDimension = "placement";
   public static final String placementishDimension = "placementish";
+  public static final String partialNullDimension = "partial_null_column";
+
   public static final List<String> dimensions = Lists.newArrayList(
       marketDimension,
       qualityDimension,
@@ -30,6 +30,8 @@ import org.junit.Assert;
 import org.junit.Test;

 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;

@@ -220,22 +222,25 @@ public class SearchBinaryFnTest
   @Test
   public void testStrlenMerge()
   {
+    StrlenSearchSortSpec searchSortSpec = new StrlenSearchSortSpec();
+    Comparator<SearchHit> c = searchSortSpec.getComparator();
+
     Result<SearchResultValue> r1 = new Result<SearchResultValue>(
         currTime,
-        new SearchResultValue(toHits("blah:thisislong"))
+        new SearchResultValue(toHits(c, "blah:thisislong"))
     );

     Result<SearchResultValue> r2 = new Result<SearchResultValue>(
         currTime,
-        new SearchResultValue(toHits("blah:short"))
+        new SearchResultValue(toHits(c, "blah:short"))
     );

     Result<SearchResultValue> expected = new Result<SearchResultValue>(
         currTime,
-        new SearchResultValue(toHits("blah:short", "blah:thisislong"))
+        new SearchResultValue(toHits(c, "blah:short", "blah:thisislong"))
     );

-    Result<SearchResultValue> actual = new SearchBinaryFn(new StrlenSearchSortSpec(), QueryGranularity.ALL, Integer.MAX_VALUE).apply(r1, r2);
+    Result<SearchResultValue> actual = new SearchBinaryFn(searchSortSpec, QueryGranularity.ALL, Integer.MAX_VALUE).apply(r1, r2);
     Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp());
     assertSearchMergeResult(expected.getValue(), actual.getValue());
   }

@@ -243,33 +248,37 @@ public class SearchBinaryFnTest
   @Test
   public void testStrlenMerge2()
   {
+    StrlenSearchSortSpec searchSortSpec = new StrlenSearchSortSpec();
+    Comparator<SearchHit> c = searchSortSpec.getComparator();
+
     Result<SearchResultValue> r1 = new Result<SearchResultValue>(
         currTime,
-        new SearchResultValue(toHits("blah:thisislong", "blah:short", "blah2:thisislong"))
+        new SearchResultValue(toHits(c, "blah:short", "blah:thisislong", "blah2:thisislong"))
     );

     Result<SearchResultValue> r2 = new Result<SearchResultValue>(
         currTime,
-        new SearchResultValue(toHits("blah:short", "blah2:thisislong"))
+        new SearchResultValue(toHits(c, "blah:short", "blah2:thisislong"))
     );

     Result<SearchResultValue> expected = new Result<SearchResultValue>(
         currTime,
-        new SearchResultValue(toHits("blah:short", "blah:thisislong", "blah2:thisislong"))
+        new SearchResultValue(toHits(c, "blah:short", "blah:thisislong", "blah2:thisislong"))
     );

-    Result<SearchResultValue> actual = new SearchBinaryFn(new StrlenSearchSortSpec(), QueryGranularity.ALL, Integer.MAX_VALUE).apply(r1, r2);
+    Result<SearchResultValue> actual = new SearchBinaryFn(searchSortSpec, QueryGranularity.ALL, Integer.MAX_VALUE).apply(r1, r2);
     Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp());
-    System.out.println("[SearchBinaryFnTest/testStrlenMerge2] " + actual.getValue());
     assertSearchMergeResult(expected.getValue(), actual.getValue());
   }

-  private List<SearchHit> toHits(String... hits) {
+  // merge function expects input to be sorted as per comparator
+  private List<SearchHit> toHits(Comparator<SearchHit> comparator, String... hits) {
     List<SearchHit> result = new ArrayList<>();
     for (String hit : hits) {
       int index = hit.indexOf(':');
       result.add(new SearchHit(hit.substring(0, index), hit.substring(index + 1)));
     }
+    Collections.sort(result, comparator);
     return result;
   }

@@ -19,25 +19,34 @@

 package io.druid.query.search;

+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Sequences;
+import com.metamx.common.logger.Logger;
 import io.druid.query.Druids;
+import io.druid.query.Query;
 import io.druid.query.QueryRunner;
 import io.druid.query.QueryRunnerTestHelper;
 import io.druid.query.Result;
 import io.druid.query.dimension.ExtractionDimensionSpec;
 import io.druid.query.extraction.LookupExtractionFn;
 import io.druid.query.extraction.MapLookupExtractor;
+import io.druid.query.filter.AndDimFilter;
 import io.druid.query.filter.DimFilter;
 import io.druid.query.filter.ExtractionDimFilter;
+import io.druid.query.filter.RegexDimFilter;
+import io.druid.query.filter.SelectorDimFilter;
 import io.druid.query.search.search.FragmentSearchQuerySpec;
 import io.druid.query.search.search.SearchHit;
 import io.druid.query.search.search.SearchQuery;
 import io.druid.query.search.search.SearchQueryConfig;
+import io.druid.query.search.search.StrlenSearchSortSpec;
+import io.druid.query.spec.MultipleIntervalSegmentSpec;
+import io.druid.segment.TestHelper;
 import org.joda.time.DateTime;
+import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;

@@ -45,27 +54,27 @@ import org.junit.runners.Parameterized;

 import java.io.IOException;
 import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;

 /**
 */
 @RunWith(Parameterized.class)
 public class SearchQueryRunnerTest
 {
+  private static final Logger LOG = new Logger(SearchQueryRunnerTest.class);
+  private static final SearchQueryQueryToolChest toolChest = new SearchQueryQueryToolChest(
+      new SearchQueryConfig(),
+      QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
+  );
+
   @Parameterized.Parameters
   public static Iterable<Object[]> constructorFeeder() throws IOException
   {
     return QueryRunnerTestHelper.transformToConstructionFeeder(
         QueryRunnerTestHelper.makeQueryRunners(
             new SearchQueryRunnerFactory(
-                new SearchQueryQueryToolChest(
-                    new SearchQueryConfig(),
-                    QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()
-                ),
+                toolChest,
                 QueryRunnerTestHelper.NOOP_QUERYWATCHER
             )
         )

@@ -81,6 +90,23 @@ public class SearchQueryRunnerTest
     this.runner = runner;
   }

+  @Test
+  public void testSearchHitSerDe() throws Exception
+  {
+    for (SearchHit hit : Arrays.asList(new SearchHit("dim1", "val1"), new SearchHit("dim2", "val2", 3))) {
+      SearchHit read = TestHelper.JSON_MAPPER.readValue(
+          TestHelper.JSON_MAPPER.writeValueAsString(hit),
+          SearchHit.class
+      );
+      Assert.assertEquals(hit, read);
+      if (hit.getCount() == null) {
+        Assert.assertNull(read.getCount());
+      } else {
+        Assert.assertEquals(hit.getCount(), read.getCount());
+      }
+    }
+  }
+
   @Test
   public void testSearch()
   {

@@ -91,15 +117,60 @@ public class SearchQueryRunnerTest
                                   .query("a")
                                   .build();

-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(
-        QueryRunnerTestHelper.qualityDimension,
-        Sets.newHashSet("automotive", "mezzanine", "travel", "health", "entertainment")
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 279));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "a", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.partialNullDimension, "value", 186));
+
+    checkSearchQuery(searchQuery, expectedHits);
+  }
+
+  @Test
+  public void testSearchWithCardinality()
+  {
+    final SearchQuery searchQuery = Druids.newSearchQueryBuilder()
+                                          .dataSource(QueryRunnerTestHelper.dataSource)
+                                          .granularity(QueryRunnerTestHelper.allGran)
+                                          .intervals(QueryRunnerTestHelper.fullOnInterval)
+                                          .query("a")
+                                          .build();
+
+    // double the value
+    QueryRunner mergedRunner = toolChest.mergeResults(
+        new QueryRunner<Result<SearchResultValue>>()
+        {
+          @Override
+          public Sequence<Result<SearchResultValue>> run(
+              Query<Result<SearchResultValue>> query, Map<String, Object> responseContext
+          )
+          {
+            final Query<Result<SearchResultValue>> query1 = searchQuery.withQuerySegmentSpec(
+                new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-01-12/2011-02-28")))
+            );
+            final Query<Result<SearchResultValue>> query2 = searchQuery.withQuerySegmentSpec(
+                new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-03-01/2011-04-15")))
+            );
+            return Sequences.concat(runner.run(query1, responseContext), runner.run(query2, responseContext));
+          }
+        }
     );
-    expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("total_market"));
-    expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("a"));
-    expectedResults.put("partial_null_column", Sets.newHashSet("value"));
-    checkSearchQuery(searchQuery, expectedResults);
+
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 558));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 372));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "a", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.partialNullDimension, "value", 372));
+
+    checkSearchQuery(searchQuery, mergedRunner, expectedHits);
   }

   @Test

@@ -118,11 +189,37 @@ public class SearchQueryRunnerTest
                                   .query("e")
                                   .build();

-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(QueryRunnerTestHelper.placementDimension, Sets.newHashSet("preferred"));
-    expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("e", "preferred"));
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementDimension, "preferred", 1209));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "e", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "preferred", 1209));

-    checkSearchQuery(searchQuery, expectedResults);
+    checkSearchQuery(searchQuery, expectedHits);
+  }
+
+  @Test
+  public void testSearchSameValueInMultiDims2()
+  {
+    SearchQuery searchQuery = Druids.newSearchQueryBuilder()
+                                    .dataSource(QueryRunnerTestHelper.dataSource)
+                                    .granularity(QueryRunnerTestHelper.allGran)
+                                    .intervals(QueryRunnerTestHelper.fullOnInterval)
+                                    .dimensions(
+                                        Arrays.asList(
+                                            QueryRunnerTestHelper.placementDimension,
+                                            QueryRunnerTestHelper.placementishDimension
+                                        )
+                                    )
+                                    .sortSpec(new StrlenSearchSortSpec())
+                                    .query("e")
+                                    .build();
+
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "e", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementDimension, "preferred", 1209));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "preferred", 1209));
+
+    checkSearchQuery(searchQuery, expectedHits);
   }

   @Test

@@ -135,23 +232,21 @@ public class SearchQueryRunnerTest
                                   .query(new FragmentSearchQuerySpec(Arrays.asList("auto", "ve")))
                                   .build();

-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive"));
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));

-    checkSearchQuery(searchQuery, expectedResults);
+    checkSearchQuery(searchQuery, expectedHits);
   }

   @Test
   public void testSearchWithDimensionQuality()
   {
-    Map<String, Set<String>> expectedResults = new HashMap<String, Set<String>>();
-    expectedResults.put(
-        QueryRunnerTestHelper.qualityDimension, new HashSet<String>(
-            Arrays.asList(
-                "automotive", "mezzanine", "travel", "health", "entertainment"
-            )
-        )
-    );
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 279));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 93));

     checkSearchQuery(
         Druids.newSearchQueryBuilder()

@@ -161,15 +256,15 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithDimensionProvider()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(QueryRunnerTestHelper.marketDimension, new HashSet<String>(Arrays.asList("total_market")));
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));

     checkSearchQuery(
         Druids.newSearchQueryBuilder()

@@ -179,28 +274,20 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithDimensionsQualityAndProvider()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.putAll(
-        ImmutableMap.<String, Set<String>>of(
-            QueryRunnerTestHelper.qualityDimension,
-            new HashSet<String>(
-                Arrays.asList(
-                    "automotive", "mezzanine", "travel", "health", "entertainment"
-                )
-            ),
-            QueryRunnerTestHelper.marketDimension,
-            new HashSet<String>(
-                Arrays.asList("total_market")
-            )
-        )
-    );
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 279));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));

     checkSearchQuery(
         Druids.newSearchQueryBuilder()

@@ -215,15 +302,15 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithDimensionsPlacementAndProvider()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(QueryRunnerTestHelper.marketDimension, new HashSet<String>(Arrays.asList("total_market")));
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));

     checkSearchQuery(
         Druids.newSearchQueryBuilder()

@@ -238,7 +325,7 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("mark")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

@@ -247,25 +334,29 @@ public class SearchQueryRunnerTest
   public void testSearchWithExtractionFilter1()
   {
     final String automotiveSnowman = "automotive☃";
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(
-        QueryRunnerTestHelper.qualityDimension, new HashSet<String>(Arrays.asList(automotiveSnowman))
-    );
-
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, automotiveSnowman, 93));

     final LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(
         new MapLookupExtractor(ImmutableMap.of("automotive", automotiveSnowman), false),
         true,
         null,
         true,
         false
     );

     checkSearchQuery(
         Druids.newSearchQueryBuilder()
               .dataSource(QueryRunnerTestHelper.dataSource)
               .granularity(QueryRunnerTestHelper.allGran)
-              .filters(new ExtractionDimFilter(QueryRunnerTestHelper.qualityDimension, automotiveSnowman, lookupExtractionFn, null))
+              .filters(
+                  new ExtractionDimFilter(
+                      QueryRunnerTestHelper.qualityDimension,
+                      automotiveSnowman,
+                      lookupExtractionFn,
+                      null
+                  )
+              )
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .dimensions(
                   new ExtractionDimensionSpec(

@@ -277,36 +368,38 @@ public class SearchQueryRunnerTest
               )
               .query("☃")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithSingleFilter1()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(
-        QueryRunnerTestHelper.qualityDimension, new HashSet<String>(Arrays.asList("automotive"))
-    );
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 93));

     checkSearchQuery(
         Druids.newSearchQueryBuilder()
               .dataSource(QueryRunnerTestHelper.dataSource)
               .granularity(QueryRunnerTestHelper.allGran)
-              .filters(QueryRunnerTestHelper.qualityDimension, "automotive")
+              .filters(
+                  new AndDimFilter(
+                      Arrays.<DimFilter>asList(
+                          new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "total_market"),
+                          new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "mezzanine"))))
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .dimensions(QueryRunnerTestHelper.qualityDimension)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithSingleFilter2()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(QueryRunnerTestHelper.marketDimension, new HashSet<String>(Arrays.asList("total_market")));
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));

     checkSearchQuery(
         Druids.newSearchQueryBuilder()

@@ -317,15 +410,15 @@ public class SearchQueryRunnerTest
               .dimensions(QueryRunnerTestHelper.marketDimension)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchMultiAndFilter()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(QueryRunnerTestHelper.qualityDimension, new HashSet<String>(Arrays.asList("automotive")));
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));

     DimFilter filter = Druids.newAndDimFilterBuilder()
                              .fields(

@@ -351,15 +444,15 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithMultiOrFilter()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
-    expectedResults.put(QueryRunnerTestHelper.qualityDimension, new HashSet<String>(Arrays.asList("automotive")));
+    List<SearchHit> expectedHits = Lists.newLinkedList();
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));

     DimFilter filter = Druids.newOrDimFilterBuilder()
                              .fields(

@@ -385,14 +478,14 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithEmptyResults()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+    List<SearchHit> expectedHits = Lists.newLinkedList();

     checkSearchQuery(
         Druids.newSearchQueryBuilder()

@@ -401,14 +494,14 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("abcd123")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

   @Test
   public void testSearchWithFilterEmptyResults()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+    List<SearchHit> expectedHits = Lists.newLinkedList();

     DimFilter filter = Druids.newAndDimFilterBuilder()
                              .fields(

@@ -433,7 +526,7 @@ public class SearchQueryRunnerTest
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

@@ -441,7 +534,7 @@ public class SearchQueryRunnerTest
   @Test
   public void testSearchNonExistingDimension()
   {
-    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+    List<SearchHit> expectedHits = Lists.newLinkedList();

     checkSearchQuery(
         Druids.newSearchQueryBuilder()

@@ -451,45 +544,64 @@ public class SearchQueryRunnerTest
               .dimensions("does_not_exist")
               .query("a")
               .build(),
-        expectedResults
+        expectedHits
     );
   }

-  private void checkSearchQuery(SearchQuery searchQuery, Map<String, Set<String>> expectedResults)
+  private void checkSearchQuery(Query searchQuery, List<SearchHit> expectedResults)
+  {
+    checkSearchQuery(searchQuery, runner, expectedResults);
+  }
+
+  private void checkSearchQuery(Query searchQuery, QueryRunner runner, List<SearchHit> expectedResults)
   {
-    HashMap<String,List> context = new HashMap<String, List>();
     Iterable<Result<SearchResultValue>> results = Sequences.toList(
-        runner.run(searchQuery, context),
+        runner.run(searchQuery, ImmutableMap.of()),
         Lists.<Result<SearchResultValue>>newArrayList()
     );
+    List<SearchHit> copy = ImmutableList.copyOf(expectedResults);
     for (Result<SearchResultValue> result : results) {
       Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), result.getTimestamp());
       Assert.assertTrue(result.getValue() instanceof Iterable);

       Iterable<SearchHit> resultValues = result.getValue();
       for (SearchHit resultValue : resultValues) {
-        String dimension = resultValue.getDimension();
-        String theValue = resultValue.getValue();
-        Assert.assertTrue(
-            String.format("Result had unknown dimension[%s]", dimension),
-            expectedResults.containsKey(dimension)
-        );
-
-        Set<String> expectedSet = expectedResults.get(dimension);
-        Assert.assertTrue(
-            String.format("Couldn't remove dim[%s], value[%s]", dimension, theValue), expectedSet.remove(theValue)
-        );
+        int index = expectedResults.indexOf(resultValue);
+        if (index < 0) {
+          fail(
+              copy, results,
+              "No result found containing " + resultValue.getDimension() + " and " + resultValue.getValue()
+          );
+        }
+        SearchHit expected = expectedResults.remove(index);
+        if (!resultValue.toString().equals(expected.toString())) {
+          fail(
+              copy, results,
+              "Invalid count for " + resultValue + ".. which was expected to be " + expected.getCount()
+          );
+        }
       }
     }
-
-    for (Map.Entry<String, Set<String>> entry : expectedResults.entrySet()) {
-      Assert.assertTrue(
-          String.format(
-              "Dimension[%s] should have had everything removed, still has[%s]", entry.getKey(), entry.getValue()
-          ),
-          entry.getValue().isEmpty()
-      );
+    if (!expectedResults.isEmpty()) {
+      fail(copy, results, "Some expected results are not shown: " + expectedResults);
     }
   }

+  private void fail(
+      List<SearchHit> expectedResults,
+      Iterable<Result<SearchResultValue>> results, String errorMsg
+  )
+  {
+    LOG.info("Expected..");
+    for (SearchHit expected : expectedResults) {
+      LOG.info(expected.toString());
+    }
+    LOG.info("Result..");
+    for (Result<SearchResultValue> r : results) {
+      for (SearchHit v : r.getValue()) {
+        LOG.info(v.toString());
+      }
+    }
+    Assert.fail(errorMsg);
+  }
 }