mirror of https://github.com/apache/druid.git

1) Rewrite SearchQueryRunner to not require StorageAdapter to be "Searchable"
2) Extract SearchQueryRunner out of SearchQueryRunnerFactory
3) Extract ColumnSelectorBitmapIndexSelector out to make it reusable

This commit is contained in:
parent 1f37e962f6
commit c02d887cfe
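For orientation, here is a minimal, hypothetical sketch of how the extracted runner is used after this change. The classes and builder methods mirror what appears in the diff below; the data source name, interval string, and search term are made-up values, and the entry point Druids.newSearchQueryBuilder() is assumed rather than shown in this diff.

import com.metamx.common.guava.Sequence;
import com.metamx.druid.Druids;                    // assumed location of the Druids builder class
import com.metamx.druid.query.search.SearchQuery;
import com.metamx.druid.query.search.SearchQueryRunner;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import io.druid.query.QueryRunner;
import io.druid.segment.Segment;

class SearchQueryRunnerSketch
{
  // The runner is now built straight from a Segment; it no longer needs the
  // StorageAdapter to implement searchDimensions().
  static Sequence<Result<SearchResultValue>> search(Segment segment)
  {
    SearchQuery query = Druids.newSearchQueryBuilder()
        .dataSource("wikipedia")                   // made-up data source
        .intervals("2013-01-01/2013-01-08")        // made-up interval; a String overload is assumed here
        .query("auto")                             // becomes an InsensitiveContainsSearchQuerySpec
        .build();

    QueryRunner<Result<SearchResultValue>> runner = new SearchQueryRunner(segment);
    return runner.run(query);
  }
}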
@@ -1,18 +1,50 @@
grammar DruidSQL;

@header {
import com.metamx.druid.aggregation.post.*;
import com.metamx.druid.aggregation.*;
import com.metamx.druid.query.filter.*;
import com.metamx.druid.query.dimension.*;
import com.metamx.druid.*;

import com.google.common.base.*;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import org.joda.time.*;
import com.metamx.druid.aggregation.CountAggregatorFactory;
import com.metamx.druid.aggregation.DoubleSumAggregatorFactory;
import com.metamx.druid.aggregation.MaxAggregatorFactory;
import com.metamx.druid.aggregation.MinAggregatorFactory;
import com.metamx.druid.aggregation.post.ArithmeticPostAggregator;
import com.metamx.druid.aggregation.post.ConstantPostAggregator;
import com.metamx.druid.aggregation.post.FieldAccessPostAggregator;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.query.dimension.DefaultDimensionSpec;
import com.metamx.druid.query.dimension.DimensionSpec;
import com.metamx.druid.query.filter.AndDimFilter;
import com.metamx.druid.query.filter.DimFilter;
import com.metamx.druid.query.filter.NotDimFilter;
import com.metamx.druid.query.filter.OrDimFilter;
import com.metamx.druid.query.filter.RegexDimFilter;
import com.metamx.druid.query.filter.SelectorDimFilter;
import io.druid.granularity.PeriodGranularity;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNSimulator;
import org.antlr.v4.runtime.atn.ParserATNSimulator;
import org.antlr.v4.runtime.atn.PredictionContextCache;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.tree.ParseTreeListener;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.joda.time.DateTime;
import org.joda.time.Period;

import java.text.*;
import java.util.*;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
}

@parser::members {
@@ -524,7 +524,7 @@ public class Druids
          querySegmentSpec,
          dimensions,
          querySpec,
          querySpec.getSearchSortSpec(),
          null,
          context
      );
    }
@@ -639,13 +639,13 @@ public class Druids

    public SearchQueryBuilder query(String q)
    {
      querySpec = new InsensitiveContainsSearchQuerySpec(q, null);
      querySpec = new InsensitiveContainsSearchQuerySpec(q);
      return this;
    }

    public SearchQueryBuilder query(Map<String, Object> q)
    {
      querySpec = new InsensitiveContainsSearchQuerySpec((String) q.get("value"), null);
      querySpec = new InsensitiveContainsSearchQuerySpec((String) q.get("value"));
      return this;
    }

@@ -34,12 +34,10 @@ public class FragmentSearchQuerySpec implements SearchQuerySpec
  private static final byte CACHE_TYPE_ID = 0x2;

  private final List<String> values;
  private final SearchSortSpec sortSpec;

  @JsonCreator
  public FragmentSearchQuerySpec(
      @JsonProperty("values") List<String> values,
      @JsonProperty("sort") SearchSortSpec sortSpec
      @JsonProperty("values") List<String> values
  )
  {
    this.values = Lists.transform(
@@ -53,7 +51,6 @@ public class FragmentSearchQuerySpec implements SearchQuerySpec
          }
        }
    );
    this.sortSpec = (sortSpec == null) ? new LexicographicSearchSortSpec() : sortSpec;
  }

  @JsonProperty
@@ -62,13 +59,6 @@ public class FragmentSearchQuerySpec implements SearchQuerySpec
    return values;
  }

  @JsonProperty("sort")
  @Override
  public SearchSortSpec getSearchSortSpec()
  {
    return sortSpec;
  }

  @Override
  public boolean accept(String dimVal)
  {
@@ -107,7 +97,6 @@ public class FragmentSearchQuerySpec implements SearchQuerySpec
  {
    return "FragmentSearchQuerySpec{" +
           "values=" + values +
           ", sortSpec=" + sortSpec +
           "}";
  }
}
@@ -31,16 +31,13 @@ public class InsensitiveContainsSearchQuerySpec implements SearchQuerySpec
  private static final byte CACHE_TYPE_ID = 0x1;

  private final String value;
  private final SearchSortSpec sortSpec;

  @JsonCreator
  public InsensitiveContainsSearchQuerySpec(
      @JsonProperty("value") String value,
      @JsonProperty("sort") SearchSortSpec sortSpec
      @JsonProperty("value") String value
  )
  {
    this.value = value.toLowerCase();
    this.sortSpec = (sortSpec == null) ? new LexicographicSearchSortSpec() : sortSpec;
  }

  @JsonProperty
@@ -49,13 +46,6 @@ public class InsensitiveContainsSearchQuerySpec implements SearchQuerySpec
    return value;
  }

  @JsonProperty("sort")
  @Override
  public SearchSortSpec getSearchSortSpec()
  {
    return sortSpec;
  }

  @Override
  public boolean accept(String dimVal)
  {
@@ -81,7 +71,6 @@ public class InsensitiveContainsSearchQuerySpec implements SearchQuerySpec
  {
    return "InsensitiveContainsSearchQuerySpec{" +
           "value=" + value +
           ", sortSpec=" + sortSpec +
           "}";
  }
}
@@ -62,7 +62,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
  {
    super(dataSource, querySegmentSpec, context);
    this.dimFilter = dimFilter;
    this.sortSpec = sortSpec;
    this.sortSpec = sortSpec == null ? new LexicographicSearchSortSpec() : sortSpec;
    this.granularity = granularity == null ? QueryGranularity.ALL : granularity;
    this.limit = (limit == 0) ? 1000 : limit;
    this.dimensions = (dimensions == null) ? null : Lists.transform(
@@ -159,7 +159,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
  @JsonProperty("sort")
  public SearchSortSpec getSort()
  {
    return sortSpec == null ? querySpec.getSearchSortSpec() : sortSpec;
    return sortSpec;
  }

  public SearchQuery withLimit(int newLimit)
@@ -31,18 +31,6 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
})
public interface SearchQuerySpec
{
  /**
   * Deprecated!
   *
   * This has been moved to the SearchQuery and is only still here for backwards compatibility purposes. Search
   * queries should be adjusted to use the sort parameter on the SearchQuery object itself rather than on this
   * object. This method will eventually go away.
   *
   * @return
   */
  @Deprecated
  public SearchSortSpec getSearchSortSpec();

  public boolean accept(String dimVal);

  public byte[] getCacheKey();
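To make the caller-visible effect of this removal concrete, a short sketch (the constructor shapes are the ones shown elsewhere in this diff; the class and field names below are illustrative): sorting no longer rides along on the query spec, and SearchQuery falls back to a LexicographicSearchSortSpec when no sort is given.

import com.metamx.druid.query.search.FragmentSearchQuerySpec;
import com.metamx.druid.query.search.SearchQuerySpec;

import java.util.Arrays;

class SortSpecMigrationSketch
{
  // Before this commit the spec took a sort as its second argument:
  //   new FragmentSearchQuerySpec(Arrays.asList("auto", "ve"), new LexicographicSearchSortSpec())
  // After this commit the spec carries only the match values; the sort is the
  // SearchQuery's own "sort" property, defaulting to lexicographic order.
  SearchQuerySpec spec = new FragmentSearchQuerySpec(Arrays.asList("auto", "ve"));
}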
@@ -21,7 +21,6 @@ package com.metamx.druid.query.extraction;

import com.google.common.collect.Sets;
import com.metamx.druid.query.search.FragmentSearchQuerySpec;
import com.metamx.druid.query.search.LexicographicSearchSortSpec;
import com.metamx.druid.query.search.SearchQuerySpec;
import org.junit.Assert;
import org.junit.Test;
@@ -49,7 +48,7 @@ public class SearchQuerySpecDimExtractionFnTest
  public void testExtraction()
  {
    SearchQuerySpec spec = new FragmentSearchQuerySpec(
        Arrays.asList("to", "yo"), new LexicographicSearchSortSpec()
        Arrays.asList("to", "yo")
    );
    DimExtractionFn dimExtractionFn = new SearchQuerySpecDimExtractionFn(spec);
    List<String> expected = Arrays.asList("Kyoto", "Tokyo", "Toyokawa", "Yorktown");
@@ -1,93 +0,0 @@
/*
 * Druid - a distributed column store.
 * Copyright (C) 2012 Metamarkets Group Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package com.metamx.druid;

import com.google.common.collect.Sets;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.druid.index.v1.ConciseOffset;
import com.metamx.druid.index.v1.processing.IntersectingOffset;
import com.metamx.druid.index.v1.processing.Offset;
import com.metamx.druid.query.search.SearchHit;
import com.metamx.druid.query.search.SearchQuery;
import com.metamx.druid.query.search.SearchQuerySpec;
import io.druid.data.Indexed;
import io.druid.query.filter.Filter;
import io.druid.segment.StorageAdapter;
import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet;

import java.util.List;
import java.util.TreeSet;

/**
 */
public abstract class BaseStorageAdapter implements StorageAdapter
{
  public abstract Indexed<String> getAvailableDimensions();

  public abstract Indexed<String> getDimValueLookup(String dimension);

  public abstract ImmutableConciseSet getInvertedIndex(String dimension, String dimVal);

  public abstract ImmutableConciseSet getInvertedIndex(String dimension, int idx);

  public abstract Offset getFilterOffset(Filter filter);

  @Override
  public Iterable<SearchHit> searchDimensions(final SearchQuery query, final Filter filter)
  {
    final List<String> dimensions = query.getDimensions();
    final SearchQuerySpec searchQuerySpec = query.getQuery();

    final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());

    Iterable<String> dimsToSearch;
    if (dimensions == null || dimensions.isEmpty()) {
      dimsToSearch = getAvailableDimensions();
    } else {
      dimsToSearch = dimensions;
    }

    Offset filterOffset = (filter == null) ? null : getFilterOffset(filter);

    for (String dimension : dimsToSearch) {
      Iterable<String> dims = getDimValueLookup(dimension);
      if (dims != null) {
        for (String dimVal : dims) {
          dimVal = dimVal == null ? "" : dimVal;
          if (searchQuerySpec.accept(dimVal)) {
            if (filterOffset != null) {
              Offset lhs = new ConciseOffset(getInvertedIndex(dimension, dimVal));
              Offset rhs = filterOffset.clone();

              if (new IntersectingOffset(lhs, rhs).withinBounds()) {
                retVal.add(new SearchHit(dimension, dimVal));
              }
            } else {
              retVal.add(new SearchHit(dimension, dimVal));
            }
          }
        }
      }
    }

    return new FunctionalIterable<SearchHit>(retVal).limit(query.getLimit());
  }
}
@@ -0,0 +1,143 @@
/*
 * Druid - a distributed column store.
 * Copyright (C) 2012, 2013 Metamarkets Group Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package com.metamx.druid.index.v1;

import com.google.common.io.Closeables;
import com.metamx.collections.spatial.ImmutableRTree;
import com.metamx.druid.kv.IndexedIterable;
import io.druid.data.Indexed;
import io.druid.query.filter.BitmapIndexSelector;
import io.druid.segment.ColumnSelector;
import io.druid.segment.column.Column;
import io.druid.segment.column.DictionaryEncodedColumn;
import io.druid.segment.column.GenericColumn;
import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet;

import java.util.Iterator;

/**
 */
public class ColumnSelectorBitmapIndexSelector implements BitmapIndexSelector
{
  private final ColumnSelector index;

  public ColumnSelectorBitmapIndexSelector(
      final ColumnSelector index
  )
  {
    this.index = index;
  }

  @Override
  public Indexed<String> getDimensionValues(String dimension)
  {
    final Column columnDesc = index.getColumn(dimension.toLowerCase());
    if (columnDesc == null || !columnDesc.getCapabilities().isDictionaryEncoded()) {
      return null;
    }
    final DictionaryEncodedColumn column = columnDesc.getDictionaryEncoding();
    return new Indexed<String>()
    {
      @Override
      public Class<? extends String> getClazz()
      {
        return String.class;
      }

      @Override
      public int size()
      {
        return column.getCardinality();
      }

      @Override
      public String get(int index)
      {
        return column.lookupName(index);
      }

      @Override
      public int indexOf(String value)
      {
        return column.lookupId(value);
      }

      @Override
      public Iterator<String> iterator()
      {
        return IndexedIterable.create(this).iterator();
      }
    };
  }

  @Override
  public int getNumRows()
  {
    GenericColumn column = null;
    try {
      column = index.getTimeColumn().getGenericColumn();
      return column.length();
    }
    finally {
      Closeables.closeQuietly(column);
    }
  }

  @Override
  public ImmutableConciseSet getConciseInvertedIndex(String dimension, String value)
  {
    final Column column = index.getColumn(dimension.toLowerCase());
    if (column == null) {
      return new ImmutableConciseSet();
    }
    if (!column.getCapabilities().hasBitmapIndexes()) {
      return new ImmutableConciseSet();
    }

    return column.getBitmapIndex().getConciseSet(value);
  }

  @Override
  public ImmutableConciseSet getConciseInvertedIndex(String dimension, int idx)
  {
    final Column column = index.getColumn(dimension.toLowerCase());
    if (column == null) {
      return new ImmutableConciseSet();
    }
    if (!column.getCapabilities().hasBitmapIndexes()) {
      return new ImmutableConciseSet();
    }
    // This is a workaround given the current state of indexing, I feel shame
    final int index1 = column.getBitmapIndex().hasNulls() ? idx + 1 : idx;

    return column.getBitmapIndex().getConciseSet(index1);
  }

  @Override
  public ImmutableRTree getSpatialIndex(String dimension)
  {
    final Column column = index.getColumn(dimension.toLowerCase());
    if (column == null || !column.getCapabilities().hasSpatialIndexes()) {
      return new ImmutableRTree();
    }

    return column.getSpatialIndex().getRTree();
  }
}
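Because the selector above is now a standalone public class, any code that holds a ColumnSelector (a QueryableIndex, for instance) can resolve a Filter to a concise bitmap without going through a StorageAdapter; this is how the new SearchQueryRunner and the reworked QueryableIndexStorageAdapter later in this diff use it. A minimal sketch, with illustrative names:

import com.metamx.druid.index.v1.ColumnSelectorBitmapIndexSelector;
import io.druid.query.filter.Filter;
import io.druid.segment.QueryableIndex;
import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet;

class BitmapFilterSketch
{
  // Resolve a filter to the set of matching rows for one segment's index.
  static ImmutableConciseSet matchingRows(QueryableIndex index, Filter filter)
  {
    // The QueryableIndex serves as the ColumnSelector backing the selector,
    // exactly as in this commit.
    return filter.goConcise(new ColumnSelectorBitmapIndexSelector(index));
  }
}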
@@ -25,17 +25,13 @@ import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.metamx.collections.spatial.search.Bound;
import com.metamx.common.IAE;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.guava.FunctionalIterator;
import com.metamx.druid.index.brita.BooleanValueMatcher;
import com.metamx.druid.index.v1.serde.ComplexMetricSerde;
import com.metamx.druid.index.v1.serde.ComplexMetrics;
import com.metamx.druid.query.search.SearchHit;
import com.metamx.druid.query.search.SearchQuery;
import com.metamx.druid.query.search.SearchQuerySpec;
import com.metamx.druid.kv.ListIndexed;
import io.druid.data.Indexed;
import io.druid.data.IndexedInts;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.Aggregator;
@@ -57,7 +53,6 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentNavigableMap;

/**
@@ -87,6 +82,12 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
    return index.getInterval();
  }

  @Override
  public Indexed<String> getAvailableDimensions()
  {
    return new ListIndexed<String>(index.getDimensions(), String.class);
  }

  @Override
  public int getDimensionCardinality(String dimension)
  {
@@ -432,92 +433,6 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
    };
  }

  @Override
  public Iterable<SearchHit> searchDimensions(final SearchQuery query, final Filter filter)
  {
    final List<String> dimensions = query.getDimensions();
    final int[] dimensionIndexes;
    final String[] dimensionNames;
    final List<String> dimensionOrder = index.getDimensions();
    if (dimensions == null || dimensions.isEmpty()) {
      dimensionIndexes = new int[dimensionOrder.size()];
      dimensionNames = new String[dimensionIndexes.length];

      Iterator<String> dimensionOrderIter = dimensionOrder.iterator();
      for (int i = 0; i < dimensionIndexes.length; ++i) {
        dimensionNames[i] = dimensionOrderIter.next();
        dimensionIndexes[i] = index.getDimensionIndex(dimensionNames[i]);
      }
    } else {
      int[] tmpDimensionIndexes = new int[dimensions.size()];
      String[] tmpDimensionNames = new String[dimensions.size()];
      int i = 0;
      for (String dimension : dimensions) {
        Integer dimIndex = index.getDimensionIndex(dimension.toLowerCase());
        if (dimIndex != null) {
          tmpDimensionNames[i] = dimension;
          tmpDimensionIndexes[i] = dimIndex;
          ++i;
        }
      }

      if (i != tmpDimensionIndexes.length) {
        dimensionIndexes = new int[i];
        dimensionNames = new String[i];
        System.arraycopy(tmpDimensionIndexes, 0, dimensionIndexes, 0, i);
        System.arraycopy(tmpDimensionNames, 0, dimensionNames, 0, i);
      } else {
        dimensionIndexes = tmpDimensionIndexes;
        dimensionNames = tmpDimensionNames;
      }
    }

    final List<Interval> queryIntervals = query.getIntervals();
    if (queryIntervals.size() != 1) {
      throw new IAE("Can only handle one interval, got query[%s]", query);
    }

    final Interval queryInterval = queryIntervals.get(0);
    final long intervalStart = queryInterval.getStartMillis();
    final long intervalEnd = queryInterval.getEndMillis();

    final EntryHolder holder = new EntryHolder();
    final ValueMatcher theMatcher = makeFilterMatcher(filter, holder);
    final SearchQuerySpec searchQuerySpec = query.getQuery();
    final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());

    ConcurrentNavigableMap<IncrementalIndex.TimeAndDims, Aggregator[]> facts = index.getSubMap(
        new IncrementalIndex.TimeAndDims(intervalStart, new String[][]{}),
        new IncrementalIndex.TimeAndDims(intervalEnd, new String[][]{})
    );

    for (Map.Entry<IncrementalIndex.TimeAndDims, Aggregator[]> entry : facts.entrySet()) {
      holder.set(entry);
      final IncrementalIndex.TimeAndDims key = holder.getKey();
      final long timestamp = key.getTimestamp();

      if (timestamp >= intervalStart && timestamp < intervalEnd && theMatcher.matches()) {
        final String[][] dims = key.getDims();

        for (int i = 0; i < dimensionIndexes.length; ++i) {
          if (dimensionIndexes[i] < dims.length) {
            final String[] dimVals = dims[dimensionIndexes[i]];
            if (dimVals != null) {
              for (int j = 0; j < dimVals.length; ++j) {
                if (searchQuerySpec.accept(dimVals[j])) {
                  retVal.add(new SearchHit(dimensionNames[i], dimVals[j]));
                }
              }
            }
          }
        }
      }
    }


    return new FunctionalIterable<SearchHit>(retVal).limit(query.getLimit());
  }

  private ValueMatcher makeFilterMatcher(final Filter filter, final EntryHolder holder)
  {
    return filter == null
@@ -24,18 +24,14 @@ import com.google.common.base.Functions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.io.Closeables;
import com.metamx.collections.spatial.ImmutableRTree;
import com.metamx.common.collect.MoreIterators;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.guava.FunctionalIterator;
import com.metamx.druid.BaseStorageAdapter;
import com.metamx.druid.index.v1.processing.Offset;
import com.metamx.druid.kv.IndexedIterable;
import com.metamx.druid.kv.SingleIndexedInts;
import io.druid.data.Indexed;
import io.druid.data.IndexedInts;
import io.druid.granularity.QueryGranularity;
import io.druid.query.filter.BitmapIndexSelector;
import io.druid.query.filter.Filter;
import io.druid.segment.Capabilities;
import io.druid.segment.ColumnSelector;
@@ -45,13 +41,13 @@ import io.druid.segment.DimensionSelector;
import io.druid.segment.FloatMetricSelector;
import io.druid.segment.ObjectMetricSelector;
import io.druid.segment.QueryableIndex;
import io.druid.segment.StorageAdapter;
import io.druid.segment.column.Column;
import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.column.ComplexColumn;
import io.druid.segment.column.DictionaryEncodedColumn;
import io.druid.segment.column.GenericColumn;
import io.druid.segment.column.ValueType;
import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet;
import org.joda.time.DateTime;
import org.joda.time.Interval;

@@ -61,7 +57,7 @@ import java.util.Map;

/**
 */
public class QueryableIndexStorageAdapter extends BaseStorageAdapter
public class QueryableIndexStorageAdapter implements StorageAdapter
{
  private final QueryableIndex index;

@@ -84,6 +80,12 @@ public class QueryableIndexStorageAdapter extends BaseStorageAdapter
    return index.getDataInterval();
  }

  @Override
  public Indexed<String> getAvailableDimensions()
  {
    return index.getAvailableDimensions();
  }

  @Override
  public int getDimensionCardinality(String dimension)
  {
@@ -155,7 +157,7 @@ public class QueryableIndexStorageAdapter extends BaseStorageAdapter
    if (filter == null) {
      iterable = new NoFilterCursorIterable(index, actualInterval, gran);
    } else {
      Offset offset = new ConciseOffset(filter.goConcise(new MMappedBitmapIndexSelector(index)));
      Offset offset = new ConciseOffset(filter.goConcise(new ColumnSelectorBitmapIndexSelector(index)));

      iterable = new CursorIterable(index, actualInterval, gran, offset);
    }
@@ -163,102 +165,6 @@ public class QueryableIndexStorageAdapter extends BaseStorageAdapter
    return FunctionalIterable.create(iterable).keep(Functions.<Cursor>identity());
  }

  @Override
  public Indexed<String> getAvailableDimensions()
  {
    return index.getAvailableDimensions();
  }

  @Override
  public Indexed<String> getDimValueLookup(String dimension)
  {
    final Column column = index.getColumn(dimension.toLowerCase());

    if (column == null || !column.getCapabilities().isDictionaryEncoded()) {
      return null;
    }

    final DictionaryEncodedColumn dictionary = column.getDictionaryEncoding();
    return new Indexed<String>()
    {
      @Override
      public Class<? extends String> getClazz()
      {
        return String.class;
      }

      @Override
      public int size()
      {
        return dictionary.getCardinality();
      }

      @Override
      public String get(int index)
      {
        return dictionary.lookupName(index);
      }

      @Override
      public int indexOf(String value)
      {
        return dictionary.lookupId(value);
      }

      @Override
      public Iterator<String> iterator()
      {
        return IndexedIterable.create(this).iterator();
      }
    };
  }

  @Override
  public ImmutableConciseSet getInvertedIndex(String dimension, String dimVal)
  {
    final Column column = index.getColumn(dimension.toLowerCase());
    if (column == null) {
      return new ImmutableConciseSet();
    }
    if (!column.getCapabilities().hasBitmapIndexes()) {
      return new ImmutableConciseSet();
    }

    return column.getBitmapIndex().getConciseSet(dimVal);
  }

  @Override
  public ImmutableConciseSet getInvertedIndex(String dimension, int idx)
  {
    final Column column = index.getColumn(dimension.toLowerCase());
    if (column == null) {
      return new ImmutableConciseSet();
    }
    if (!column.getCapabilities().hasBitmapIndexes()) {
      return new ImmutableConciseSet();
    }
    // This is a workaround given the current state of indexing, I feel shame
    final int index = column.getBitmapIndex().hasNulls() ? idx + 1 : idx;

    return column.getBitmapIndex().getConciseSet(index);
  }

  public ImmutableRTree getRTreeSpatialIndex(String dimension)
  {
    final Column column = index.getColumn(dimension.toLowerCase());
    if (column == null || !column.getCapabilities().hasSpatialIndexes()) {
      return new ImmutableRTree();
    }

    return column.getSpatialIndex().getRTree();
  }

  @Override
  public Offset getFilterOffset(Filter filter)
  {
    return new ConciseOffset(filter.goConcise(new MMappedBitmapIndexSelector(index)));
  }

  private static class CursorIterable implements Iterable<Cursor>
  {
    private final ColumnSelector index;
@@ -1081,87 +987,4 @@ public class QueryableIndexStorageAdapter extends BaseStorageAdapter
      return 0;
    }
  }

  private class MMappedBitmapIndexSelector implements BitmapIndexSelector
  {
    private final ColumnSelector index;

    public MMappedBitmapIndexSelector(final ColumnSelector index)
    {
      this.index = index;
    }

    @Override
    public Indexed<String> getDimensionValues(String dimension)
    {
      final Column columnDesc = index.getColumn(dimension.toLowerCase());
      if (columnDesc == null || !columnDesc.getCapabilities().isDictionaryEncoded()) {
        return null;
      }
      final DictionaryEncodedColumn column = columnDesc.getDictionaryEncoding();
      return new Indexed<String>()
      {
        @Override
        public Class<? extends String> getClazz()
        {
          return String.class;
        }

        @Override
        public int size()
        {
          return column.getCardinality();
        }

        @Override
        public String get(int index)
        {
          return column.lookupName(index);
        }

        @Override
        public int indexOf(String value)
        {
          return column.lookupId(value);
        }

        @Override
        public Iterator<String> iterator()
        {
          return IndexedIterable.create(this).iterator();
        }
      };
    }

    @Override
    public int getNumRows()
    {
      GenericColumn column = null;
      try {
        column = index.getTimeColumn().getGenericColumn();
        return column.length();
      }
      finally {
        Closeables.closeQuietly(column);
      }
    }

    @Override
    public ImmutableConciseSet getConciseInvertedIndex(String dimension, String value)
    {
      return getInvertedIndex(dimension, value);
    }

    @Override
    public ImmutableConciseSet getConciseInvertedIndex(String dimension, int idx)
    {
      return getInvertedIndex(dimension, idx);
    }

    @Override
    public ImmutableRTree getSpatialIndex(String dimension)
    {
      return getRTreeSpatialIndex(dimension);
    }
  }
}
@@ -1,100 +0,0 @@
/*
 * Druid - a distributed column store.
 * Copyright (C) 2012 Metamarkets Group Inc.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

package com.metamx.druid.index.v1;

import com.metamx.druid.query.search.SearchHit;
import com.metamx.druid.query.search.SearchQuery;
import io.druid.granularity.QueryGranularity;
import io.druid.query.filter.Filter;
import io.druid.segment.Capabilities;
import io.druid.segment.Cursor;
import io.druid.segment.StorageAdapter;
import org.joda.time.DateTime;
import org.joda.time.Interval;

/**
 */
public class SegmentIdAttachedStorageAdapter implements StorageAdapter
{
  private final String segmentId;
  private final StorageAdapter delegate;

  public SegmentIdAttachedStorageAdapter(
      String segmentId,
      StorageAdapter delegate
  )
  {
    this.segmentId = segmentId;
    this.delegate = delegate;
  }

  @Override
  public String getSegmentIdentifier()
  {
    return segmentId;
  }

  @Override
  public Interval getInterval()
  {
    return delegate.getInterval();
  }

  @Override
  public Iterable<SearchHit> searchDimensions(SearchQuery query, Filter filter)
  {
    return delegate.searchDimensions(query, filter);
  }

  @Override
  public Iterable<Cursor> makeCursors(Filter filter, Interval interval, QueryGranularity gran)
  {
    return delegate.makeCursors(filter, interval, gran);
  }

  @Override
  public Capabilities getCapabilities()
  {
    return delegate.getCapabilities();
  }

  @Override
  public DateTime getMaxTime()
  {
    return delegate.getMaxTime();
  }

  @Override
  public DateTime getMinTime()
  {
    return delegate.getMinTime();
  }

  @Override
  public int getDimensionCardinality(String dimension)
  {
    return delegate.getDimensionCardinality(dimension);
  }

  public StorageAdapter getDelegate()
  {
    return delegate;
  }
}
@@ -0,0 +1,165 @@
package com.metamx.druid.query.search;

import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.metamx.common.ISE;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.index.brita.Filters;
import com.metamx.druid.index.v1.ColumnSelectorBitmapIndexSelector;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import com.metamx.emitter.EmittingLogger;
import io.druid.data.IndexedInts;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.filter.Filter;
import io.druid.segment.Cursor;
import io.druid.segment.DimensionSelector;
import io.druid.segment.QueryableIndex;
import io.druid.segment.Segment;
import io.druid.segment.StorageAdapter;
import io.druid.segment.column.BitmapIndex;
import io.druid.segment.column.Column;
import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet;

import java.util.List;
import java.util.Map;
import java.util.TreeSet;

/**
 */
public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
{
  private static final EmittingLogger log = new EmittingLogger(SearchQueryRunner.class);

  private final Segment segment;

  public SearchQueryRunner(Segment segment)
  {
    this.segment = segment;
  }

  @Override
  public Sequence<Result<SearchResultValue>> run(final Query<Result<SearchResultValue>> input)
  {
    if (!(input instanceof SearchQuery)) {
      throw new ISE("Got a [%s] which isn't a %s", input.getClass(), SearchQuery.class);
    }

    final SearchQuery query = (SearchQuery) input;
    final Filter filter = Filters.convertDimensionFilters(query.getDimensionsFilter());
    final List<String> dimensions = query.getDimensions();
    final SearchQuerySpec searchQuerySpec = query.getQuery();
    final int limit = query.getLimit();

    final QueryableIndex index = segment.asQueryableIndex();
    if (index != null) {
      final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());

      Iterable<String> dimsToSearch;
      if (dimensions == null || dimensions.isEmpty()) {
        dimsToSearch = index.getAvailableDimensions();
      } else {
        dimsToSearch = dimensions;
      }


      final ImmutableConciseSet baseFilter;
      if (filter == null) {
        // Accept all
        baseFilter = ImmutableConciseSet.complement(new ImmutableConciseSet(), index.getNumRows());
      }
      else {
        baseFilter = filter.goConcise(new ColumnSelectorBitmapIndexSelector(index));
      }

      for (String dimension : dimsToSearch) {
        final Column column = index.getColumn(dimension.toLowerCase());
        if (column == null) {
          continue;
        }

        final BitmapIndex bitmapIndex = column.getBitmapIndex();
        if (bitmapIndex != null) {
          for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
            String dimVal = Strings.nullToEmpty(bitmapIndex.getValue(i));
            if (searchQuerySpec.accept(dimVal) &&
                ImmutableConciseSet.intersection(baseFilter, bitmapIndex.getConciseSet(i)).size() > 0) {
              retVal.add(new SearchHit(dimension, dimVal));
              if (retVal.size() >= limit) {
                return makeReturnResult(limit, retVal);
              }
            }
          }
        }
      }

      return makeReturnResult(limit, retVal);
    }

    final StorageAdapter adapter = segment.asStorageAdapter();
    if (adapter != null) {
      Iterable<String> dimsToSearch;
      if (dimensions == null || dimensions.isEmpty()) {
        dimsToSearch = adapter.getAvailableDimensions();
      } else {
        dimsToSearch = dimensions;
      }

      final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());

      final Iterable<Cursor> cursors = adapter.makeCursors(filter, segment.getDataInterval(), QueryGranularity.ALL);
      for (Cursor cursor : cursors) {
        Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
        for (String dim : dimsToSearch) {
          dimSelectors.put(dim, cursor.makeDimensionSelector(dim));
        }

        while (!cursor.isDone()) {
          for (Map.Entry<String, DimensionSelector> entry : dimSelectors.entrySet()) {
            final DimensionSelector selector = entry.getValue();
            final IndexedInts vals = selector.getRow();
            for (int i = 0; i < vals.size(); ++i) {
              final String dimVal = selector.lookupName(vals.get(i));
              if (searchQuerySpec.accept(dimVal)) {
                retVal.add(new SearchHit(entry.getKey(), dimVal));
                if (retVal.size() >= limit) {
                  return makeReturnResult(limit, retVal);
                }
              }
            }
          }

          cursor.advance();
        }
      }

      return makeReturnResult(limit, retVal);
    }

    log.makeAlert("WTF!? Unable to process search query on segment.")
       .addData("segment", segment.getIdentifier())
       .addData("query", query);
    return Sequences.empty();
  }

  private Sequence<Result<SearchResultValue>> makeReturnResult(int limit, TreeSet<SearchHit> retVal)
  {
    return Sequences.simple(
        ImmutableList.of(
            new Result<SearchResultValue>(
                segment.getDataInterval().getStart(),
                new SearchResultValue(
                    Lists.newArrayList(new FunctionalIterable<SearchHit>(retVal).limit(limit))
                )
            )
        )
    );
  }
}
@@ -19,24 +19,15 @@

package com.metamx.druid.query.search;

import com.google.common.collect.Iterators;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.common.guava.BaseSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.SearchResultBuilder;
import com.metamx.druid.index.brita.Filters;
import com.metamx.druid.query.ChainedExecutionQueryRunner;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryToolChest;
import io.druid.segment.Segment;
import io.druid.segment.StorageAdapter;

import java.util.Iterator;
import java.util.concurrent.ExecutorService;

/**
@@ -74,49 +65,4 @@ public class SearchQueryRunnerFactory implements QueryRunnerFactory<Result<Searc
  {
    return toolChest;
  }

  private static class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
  {
    private final StorageAdapter adapter;

    public SearchQueryRunner(Segment segment)
    {
      this.adapter = segment.asStorageAdapter();
    }

    @Override
    public Sequence<Result<SearchResultValue>> run(final Query<Result<SearchResultValue>> input)
    {
      if (!(input instanceof SearchQuery)) {
        throw new ISE("Got a [%s] which isn't a %s", input.getClass(), SearchQuery.class);
      }

      final SearchQuery query = (SearchQuery) input;

      return new BaseSequence<Result<SearchResultValue>, Iterator<Result<SearchResultValue>>>(
          new BaseSequence.IteratorMaker<Result<SearchResultValue>, Iterator<Result<SearchResultValue>>>()
          {
            @Override
            public Iterator<Result<SearchResultValue>> make()
            {
              return Iterators.singletonIterator(
                  new SearchResultBuilder(
                      adapter.getInterval().getStart(),
                      adapter.searchDimensions(
                          query,
                          Filters.convertDimensionFilters(query.getDimensionsFilter())
                      )
                  ).build()
              );
            }

            @Override
            public void cleanup(Iterator<Result<SearchResultValue>> toClean)
            {

            }
          }
      );
    }
  }
}
@@ -94,7 +94,7 @@ public class SearchQueryRunnerTest
        .dataSource(QueryRunnerTestHelper.dataSource)
        .granularity(QueryRunnerTestHelper.allGran)
        .intervals(QueryRunnerTestHelper.fullOnInterval)
        .query(new FragmentSearchQuerySpec(Arrays.asList("auto", "ve"), null))
        .query(new FragmentSearchQuerySpec(Arrays.asList("auto", "ve")))
        .build();

    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
@@ -104,7 +104,7 @@ public class SearchQueryRunnerTest
  }

  @Test
  public void testSearchWithDimension1()
  public void testSearchWithDimensionQuality()
  {
    Map<String, Set<String>> expectedResults = new HashMap<String, Set<String>>();
    expectedResults.put(
@@ -128,7 +128,7 @@ public class SearchQueryRunnerTest
  }

  @Test
  public void testSearchWithDimension2()
  public void testSearchWithDimensionProvider()
  {
    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    expectedResults.put(QueryRunnerTestHelper.providerDimension, new HashSet<String>(Arrays.asList("total_market")));
@@ -146,7 +146,7 @@ public class SearchQueryRunnerTest
  }

  @Test
  public void testSearchWithDimensions1()
  public void testSearchWithDimensionsQualityAndProvider()
  {
    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    expectedResults.putAll(
@@ -182,7 +182,7 @@ public class SearchQueryRunnerTest
  }

  @Test
  public void testSearchWithDimensions2()
  public void testSearchWithDimensionsPlacementAndProvider()
  {
    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    expectedResults.put(QueryRunnerTestHelper.providerDimension, new HashSet<String>(Arrays.asList("total_market")));
@@ -390,7 +390,7 @@ public class SearchQueryRunnerTest
    for (Map.Entry<String, Set<String>> entry : expectedResults.entrySet()) {
      Assert.assertTrue(
          String.format(
              "Dimension %s should have had everything removed, still has[%s]", entry.getKey(), entry.getValue()
              "Dimension[%s] should have had everything removed, still has[%s]", entry.getKey(), entry.getValue()
          ),
          entry.getValue().isEmpty()
      );