adding decorate(DimensionSelector) to DimensionSpec to enable support for arbitrary filtering/transformations of returned dimension values

Himanshu Gupta 2015-12-19 02:58:54 -06:00
parent 3c107c5757
commit fa5c3bb014
17 changed files with 133 additions and 70 deletions

View File

@ -33,6 +33,7 @@ import io.druid.query.aggregation.Aggregators;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.query.aggregation.hyperloglog.HyperLogLogCollector;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector;
import org.apache.commons.codec.binary.Base64;
@ -107,7 +108,7 @@ public class CardinalityAggregatorFactory implements AggregatorFactory
@Override
public DimensionSelector apply(@Nullable String input)
{
return columnFactory.makeDimensionSelector(input, null);
return columnFactory.makeDimensionSelector(new DefaultDimensionSpec(input, input));
}
}
), Predicates.notNull()
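For readers following the call-site changes: the hunk above replaces the old (dimensionName, extractionFn) arguments with a single DimensionSpec. A minimal sketch of the equivalent wrapper is shown below; the class and method names (DimensionSelectorHelper, makePlainSelector) are hypothetical and not part of this commit.

import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector;

class DimensionSelectorHelper
{
  // Equivalent of the pre-commit makeDimensionSelector(dimension, null) call: wrap the
  // dimension name in a DefaultDimensionSpec whose output name is the dimension itself,
  // so the spec's decorate(..) is a pass-through.
  static DimensionSelector makePlainSelector(ColumnSelectorFactory factory, String dimension)
  {
    return factory.makeDimensionSelector(new DefaultDimensionSpec(dimension, dimension));
  }
}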

View File

@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.metamx.common.StringUtils;
import io.druid.query.extraction.ExtractionFn;
import io.druid.segment.DimensionSelector;
import java.nio.ByteBuffer;
@ -66,6 +67,12 @@ public class DefaultDimensionSpec implements DimensionSpec
return null;
}
@Override
public DimensionSelector decorate(DimensionSelector selector)
{
return selector;
}
@Override
public byte[] getCacheKey()
{

View File

@ -22,6 +22,7 @@ package io.druid.query.dimension;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.query.extraction.ExtractionFn;
import io.druid.segment.DimensionSelector;
/**
*/
@ -32,13 +33,17 @@ import io.druid.query.extraction.ExtractionFn;
})
public interface DimensionSpec
{
public String getDimension();
String getDimension();
public String getOutputName();
String getOutputName();
public ExtractionFn getExtractionFn();
//ExtractionFn can be implemented with decorate(..) fn
@Deprecated
ExtractionFn getExtractionFn();
public byte[] getCacheKey();
DimensionSelector decorate(DimensionSelector selector);
public boolean preservesOrdering();
byte[] getCacheKey();
boolean preservesOrdering();
}
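To illustrate what the new decorate(..) hook makes possible (the "arbitrary filtering/transformations" from the commit message), here is a minimal sketch of a custom DimensionSpec that upper-cases every value returned by the wrapped DimensionSelector. The class is hypothetical and not part of this commit; it assumes the four-method DimensionSelector interface of this era (getRow, getValueCardinality, lookupName, lookupId) and an invented cache-key scheme.

import io.druid.query.dimension.DimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.segment.DimensionSelector;
import io.druid.segment.data.IndexedInts;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class UpperCaseDimensionSpec implements DimensionSpec
{
  private final String dimension;
  private final String outputName;

  public UpperCaseDimensionSpec(String dimension, String outputName)
  {
    this.dimension = dimension;
    this.outputName = outputName;
  }

  @Override
  public String getDimension()
  {
    return dimension;
  }

  @Override
  public String getOutputName()
  {
    return outputName;
  }

  @Override
  @Deprecated
  public ExtractionFn getExtractionFn()
  {
    return null; // no extraction function; the transformation lives in decorate(..)
  }

  @Override
  public DimensionSelector decorate(final DimensionSelector selector)
  {
    return new DimensionSelector()
    {
      @Override
      public IndexedInts getRow()
      {
        return selector.getRow();
      }

      @Override
      public int getValueCardinality()
      {
        return selector.getValueCardinality();
      }

      @Override
      public String lookupName(int id)
      {
        final String value = selector.lookupName(id);
        return value == null ? null : value.toUpperCase();
      }

      @Override
      public int lookupId(String name)
      {
        // delegated unchanged; not a true inverse of the transformed lookupName
        return selector.lookupId(name);
      }
    };
  }

  @Override
  public byte[] getCacheKey()
  {
    // assumed cache-key scheme for this sketch: a type marker byte followed by the dimension name
    final byte[] dimBytes = dimension.getBytes(StandardCharsets.UTF_8);
    return ByteBuffer.allocate(1 + dimBytes.length).put((byte) 0x7F).put(dimBytes).array();
  }

  @Override
  public boolean preservesOrdering()
  {
    return false; // upper-casing can reorder values relative to the stored dictionary
  }
}

A production version would also need Jackson annotations and a registered subtype name (per the @JsonSubTypes block above) so it could be referenced from query JSON.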

View File

@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.metamx.common.StringUtils;
import io.druid.query.extraction.ExtractionFn;
import io.druid.segment.DimensionSelector;
import java.nio.ByteBuffer;
@ -77,6 +78,12 @@ public class ExtractionDimensionSpec implements DimensionSpec
return extractionFn;
}
@Override
public DimensionSelector decorate(DimensionSelector selector)
{
return selector;
}
@Override
public byte[] getCacheKey()
{

View File

@ -316,10 +316,7 @@ public class GroupByQueryEngine
for (int i = 0; i < dimensionSpecs.size(); ++i) {
final DimensionSpec dimSpec = dimensionSpecs.get(i);
final DimensionSelector selector = cursor.makeDimensionSelector(
dimSpec.getDimension(),
dimSpec.getExtractionFn()
);
final DimensionSelector selector = cursor.makeDimensionSelector(dimSpec);
if (selector != null) {
dimensions.add(selector);
dimNames.add(dimSpec.getOutputName());

View File

@ -175,7 +175,7 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
for (DimensionSpec dim : dimsToSearch) {
dimSelectors.put(
dim.getOutputName(),
cursor.makeDimensionSelector(dim.getDimension(), dim.getExtractionFn())
cursor.makeDimensionSelector(dim)
);
}

View File

@ -26,6 +26,7 @@ import com.metamx.common.ISE;
import com.metamx.common.guava.Sequence;
import io.druid.query.QueryRunnerHelper;
import io.druid.query.Result;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.segment.Cursor;
import io.druid.segment.DimensionSelector;
import io.druid.segment.LongColumnSelector;
@ -89,7 +90,7 @@ public class SelectQueryEngine
final Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
for (String dim : dims) {
// switching to using DimensionSpec for select would allow the use of extractionFn here.
final DimensionSelector dimSelector = cursor.makeDimensionSelector(dim, null);
final DimensionSelector dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec(dim, dim));
dimSelectors.put(dim, dimSelector);
}
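The code comment in this hunk points at the natural follow-up. A hedged sketch, assuming Select were later changed to carry List&lt;DimensionSpec&gt; instead of raw dimension names (the names SelectDimensionSelectors, makeSelectors, and dimSpecs below are hypothetical), would mirror what SearchQueryRunner already does above:

import com.google.common.collect.Maps;
import io.druid.query.dimension.DimensionSpec;
import io.druid.segment.Cursor;
import io.druid.segment.DimensionSelector;
import java.util.List;
import java.util.Map;

class SelectDimensionSelectors
{
  // Building selectors from DimensionSpecs lets extraction and decoration apply automatically.
  static Map<String, DimensionSelector> makeSelectors(Cursor cursor, List<DimensionSpec> dimSpecs)
  {
    final Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
    for (DimensionSpec dimSpec : dimSpecs) {
      dimSelectors.put(dimSpec.getOutputName(), cursor.makeDimensionSelector(dimSpec));
    }
    return dimSelectors;
  }
}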

View File

@ -43,8 +43,7 @@ public class TopNMapFn implements Function<Cursor, Result<TopNResultValue>>
public Result<TopNResultValue> apply(Cursor cursor)
{
final DimensionSelector dimSelector = cursor.makeDimensionSelector(
query.getDimensionSpec().getDimension(),
query.getDimensionSpec().getExtractionFn()
query.getDimensionSpec()
);
if (dimSelector == null) {
return null;

View File

@ -19,16 +19,14 @@
package io.druid.segment;
import io.druid.query.extraction.ExtractionFn;
import javax.annotation.Nullable;
import io.druid.query.dimension.DimensionSpec;
/**
* Factory class for MetricSelectors
*/
public interface ColumnSelectorFactory
{
public DimensionSelector makeDimensionSelector(String dimensionName, @Nullable ExtractionFn extractionFn);
public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec);
public FloatColumnSelector makeFloatColumnSelector(String columnName);
public LongColumnSelector makeLongColumnSelector(String columnName);
public ObjectColumnSelector makeObjectColumnSelector(String columnName);

View File

@ -30,6 +30,7 @@ import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.granularity.QueryGranularity;
import io.druid.query.QueryInterruptedException;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.filter.Filter;
import io.druid.segment.column.Column;
@ -44,7 +45,6 @@ import io.druid.segment.data.Offset;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.util.Iterator;
@ -296,10 +296,19 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
@Override
public DimensionSelector makeDimensionSelector(
final String dimension,
@Nullable final ExtractionFn extractionFn
DimensionSpec dimensionSpec
)
{
return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec));
}
private DimensionSelector makeDimensionSelectorUndecorated(
DimensionSpec dimensionSpec
)
{
final String dimension = dimensionSpec.getDimension();
final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
final Column columnDesc = index.getColumn(dimension);
if (columnDesc == null) {
return NULL_DIMENSION_SELECTOR;
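The pattern above, repeated in IncrementalIndex and IncrementalIndexStorageAdapter below, is: build the selector as before from the spec's dimension and (deprecated) extraction function, then hand it to dimensionSpec.decorate(..) so the spec gets the last word. Because the adapters call decorate(..) themselves, spec-level wrapping is applied uniformly whether the query runs against a QueryableIndex or an IncrementalIndex. A short usage sketch, reusing the hypothetical UpperCaseDimensionSpec from the earlier sketch and an assumed dimension name "page":

final DimensionSelector selector = cursor.makeDimensionSelector(new UpperCaseDimensionSpec("page", "page"));
// values observed through the selector are already transformed; the query engine needs no special handling
final String firstValue = selector.lookupName(selector.getRow().get(0));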

View File

@ -23,6 +23,7 @@ import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.metamx.collections.bitmap.ImmutableBitmap;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.filter.BitmapIndexSelector;
import io.druid.query.filter.Filter;
@ -124,7 +125,9 @@ public class ExtractionFilter implements Filter
@Override
public ValueMatcher makeMatcher(ColumnSelectorFactory columnSelectorFactory)
{
final DimensionSelector dimensionSelector = columnSelectorFactory.makeDimensionSelector(dimension, null);
final DimensionSelector dimensionSelector = columnSelectorFactory.makeDimensionSelector(
new DefaultDimensionSpec(dimension, dimension)
);
if (dimensionSelector == null) {
return new BooleanValueMatcher(value.equals(Strings.nullToEmpty(fn.apply(null))));
} else {

View File

@ -21,6 +21,7 @@ package io.druid.segment.filter;
import com.google.common.base.Strings;
import com.metamx.collections.bitmap.ImmutableBitmap;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.filter.BitmapIndexSelector;
import io.druid.query.filter.Filter;
import io.druid.query.filter.ValueMatcher;
@ -60,7 +61,9 @@ public class SelectorFilter implements Filter
@Override
public ValueMatcher makeMatcher(ColumnSelectorFactory columnSelectorFactory)
{
final DimensionSelector dimensionSelector = columnSelectorFactory.makeDimensionSelector(dimension, null);
final DimensionSelector dimensionSelector = columnSelectorFactory.makeDimensionSelector(
new DefaultDimensionSpec(dimension, dimension)
);
// Missing columns match a null or empty string value and don't match anything else
if (dimensionSelector == null) {

View File

@ -38,6 +38,7 @@ import io.druid.data.input.impl.SpatialDimensionSchema;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector;
@ -169,8 +170,20 @@ public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>,
}
@Override
public DimensionSelector makeDimensionSelector(final String dimension, final ExtractionFn extractionFn)
public DimensionSelector makeDimensionSelector(
DimensionSpec dimensionSpec
)
{
return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec));
}
private DimensionSelector makeDimensionSelectorUndecorated(
DimensionSpec dimensionSpec
)
{
final String dimension = dimensionSpec.getDimension();
final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
return new DimensionSelector()
{
@Override

View File

@ -30,6 +30,7 @@ import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import io.druid.granularity.QueryGranularity;
import io.druid.query.QueryInterruptedException;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.filter.Filter;
import io.druid.query.filter.ValueMatcher;
@ -294,10 +295,19 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
@Override
public DimensionSelector makeDimensionSelector(
final String dimension,
@Nullable final ExtractionFn extractionFn
DimensionSpec dimensionSpec
)
{
return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec));
}
private DimensionSelector makeDimensionSelectorUndecorated(
DimensionSpec dimensionSpec
)
{
final String dimension = dimensionSpec.getDimension();
final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
if (dimension.equals(Column.TIME_COLUMN_NAME)) {
return new SingleScanTimeDimSelector(makeLongColumnSelector(dimension), extractionFn);
}

View File

@ -20,6 +20,7 @@
package io.druid.query.aggregation;
import com.google.common.collect.Lists;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.filter.AndDimFilter;
import io.druid.query.filter.DimFilter;
@ -73,10 +74,14 @@ public class FilteredAggregatorTest
return new ColumnSelectorFactory()
{
@Override
public DimensionSelector makeDimensionSelector(String dimensionName, ExtractionFn extractionFn)
public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec)
{
final String dimensionName = dimensionSpec.getDimension();
final ExtractionFn extractionFn = dimensionSpec.getExtractionFn();
if (dimensionName.equals("dim")) {
return new DimensionSelector()
return dimensionSpec.decorate(
new DimensionSelector()
{
@Override
public IndexedInts getRow()
@ -119,7 +124,8 @@ public class FilteredAggregatorTest
throw new IllegalArgumentException();
}
}
};
}
);
} else {
throw new UnsupportedOperationException();
}

View File

@ -36,6 +36,7 @@ import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.JavaScriptAggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.filter.DimFilters;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.groupby.GroupByQueryConfig;
@ -260,7 +261,7 @@ public class IncrementalIndexStorageAdapterTest
Cursor cursor = Sequences.toList(Sequences.limit(cursorSequence, 1), Lists.<Cursor>newArrayList()).get(0);
DimensionSelector dimSelector;
dimSelector = cursor.makeDimensionSelector("sally", null);
dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));
index.add(
@ -274,7 +275,7 @@ public class IncrementalIndexStorageAdapterTest
// Cursor reset should not be affected by out of order values
cursor.reset();
dimSelector = cursor.makeDimensionSelector("sally", null);
dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally"));
Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0)));
}

View File

@ -31,6 +31,7 @@ import io.druid.data.input.Firehose;
import io.druid.data.input.InputRow;
import io.druid.data.input.MapBasedInputRow;
import io.druid.granularity.QueryGranularity;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.filter.DimFilter;
import io.druid.query.select.EventHolder;
import io.druid.segment.Cursor;
@ -85,7 +86,9 @@ public class IngestSegmentFirehose implements Firehose
final Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
for (String dim : dims) {
final DimensionSelector dimSelector = cursor.makeDimensionSelector(dim, null);
final DimensionSelector dimSelector = cursor.makeDimensionSelector(
new DefaultDimensionSpec(dim, dim)
);
// dimSelector is null if the dimension is not present
if (dimSelector != null) {
dimSelectors.put(dim, dimSelector);