mirror of https://github.com/apache/druid.git
Revert "Merge pull request #495 from metamx/dim-extraction-null-values"
This reverts commit76437379d7
, reversing changes made to79e6d4eb56
. Conflicts: pom.xml
parent 42994cebcd
commit 9d35490cdf
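What was reverted, in one picture: PR #495 let a dimension-extraction function map a value to null and still have those rows grouped (under a null key); this commit restores the older behavior where null keys are simply dropped. Below is a minimal, hypothetical sketch of the difference in plain Java, not the Druid API; only the total_market-to-null mapping is taken from the test deleted further down.

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Function;

    public class NullKeyExample
    {
      public static void main(String[] args)
      {
        // Stand-in for a DimExtractionFn that maps one value to null.
        Function<String, String> extractionFn =
            dimValue -> "total_market".equals(dimValue) ? null : dimValue;

        Map<String, Integer> rowsPerKey = new HashMap<>();
        for (String dimValue : List.of("spot", "total_market", "upfront")) {
          String key = extractionFn.apply(dimValue);
          if (key == null) {
            continue; // post-revert behavior: null keys are skipped again
          }
          rowsPerKey.merge(key, 1, Integer::sum);
        }
        // With #495 the null key would have formed its own bucket;
        // after this revert the output is {spot=1, upfront=1}.
        System.out.println(rowsPerKey);
      }
    }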
pom.xml

@@ -41,7 +41,7 @@
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <metamx.java-util.version>0.25.5</metamx.java-util.version>
         <apache.curator.version>2.4.0</apache.curator.version>
-        <druid.api.version>0.2.0</druid.api.version>
+        <druid.api.version>0.2.2</druid.api.version>
     </properties>
 
     <modules>
DimExtractionTopNAlgorithm.java

@@ -96,7 +96,10 @@ public class DimExtractionTopNAlgorithm extends BaseTopNAlgorithm<Aggregator[][]
       Aggregator[] theAggregators = rowSelector[dimIndex];
       if (theAggregators == null) {
         String key = query.getDimensionSpec().getDimExtractionFn().apply(dimSelector.lookupName(dimIndex));
-        // null keys are allowed
+        if (key == null) {
+          rowSelector[dimIndex] = EMPTY_ARRAY;
+          continue;
+        }
         theAggregators = aggregatesStore.get(key);
         if (theAggregators == null) {
           theAggregators = makeAggregators(cursor, query.getAggregatorSpecs());
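Note on the hunk above: the restored guard memoizes "this dictionary id extracts to null" by parking the id on the shared EMPTY_ARRAY, so the extraction function runs at most once per id over the whole scan. A self-contained sketch of that pattern follows, with stand-in names rather than the real Druid types.

    public class NullKeyGuardSketch
    {
      private static final long[] EMPTY = new long[0];

      static String extractKey(String dimValue)
      {
        // Stand-in for DimExtractionFn.apply(): drop values starting with "m".
        return dimValue.startsWith("m") ? null : dimValue.substring(0, 1);
      }

      public static void main(String[] args)
      {
        String[] dictionary = {"automotive", "business", "mezzanine"};
        long[][] rowSelector = new long[dictionary.length][];

        // Simulate scanning several rows that keep hitting the same ids.
        for (int row = 0; row < 3; row++) {
          for (int dimIndex = 0; dimIndex < dictionary.length; dimIndex++) {
            long[] aggs = rowSelector[dimIndex];
            if (aggs == null) {
              String key = extractKey(dictionary[dimIndex]);
              if (key == null) {
                rowSelector[dimIndex] = EMPTY; // never re-extracted
                continue;
              }
              aggs = new long[]{0};
              rowSelector[dimIndex] = aggs;
            }
            if (aggs.length > 0) {
              aggs[0]++; // aggregate this row
            }
          }
        }
        System.out.println(java.util.Arrays.deepToString(rowSelector));
        // [[3], [3], []] -> "mezzanine" was skipped via the EMPTY sentinel
      }
    }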
IncrementalIndex.java

@@ -281,6 +281,7 @@ public class IncrementalIndex implements Iterable<Row>
               );
             }
           }
+
         );
       }
 
@@ -329,7 +330,7 @@ public class IncrementalIndex implements Iterable<Row>
       int count = 0;
       for (String dimValue : dimValues) {
         String canonicalDimValue = dimLookup.get(dimValue);
-        if (canonicalDimValue == null && !dimLookup.contains(dimValue)) {
+        if (canonicalDimValue == null) {
           canonicalDimValue = dimValue;
           dimLookup.add(dimValue);
         }
@@ -559,17 +560,7 @@ public class IncrementalIndex implements Iterable<Row>
 
         int valsIndex = 0;
         while (retVal == 0 && valsIndex < lhsVals.length) {
-          final String lhsVal = lhsVals[valsIndex];
-          final String rhsVal = rhsVals[valsIndex];
-          if (lhsVal == null && rhsVal == null) {
-            return 0;
-          } else if (lhsVal == null) {
-            return -1;
-          } else if (rhsVal == null) {
-            return 1;
-          } else {
-            retVal = lhsVal.compareTo(rhsVal);
-          }
+          retVal = lhsVals[valsIndex].compareTo(rhsVals[valsIndex]);
           ++valsIndex;
         }
         ++index;
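The eleven removed lines gave the row comparator an explicit nulls-first ordering (with the quirk that a null/null pair returned 0 for the entire row comparison immediately, rather than moving on to the remaining values); the restored one-liner assumes neither side is null. For reference, a sketch of the same nulls-first ordering using java.util.Comparator — an equivalent formulation, not the code Druid used:

    import java.util.Comparator;

    public class NullsFirstSketch
    {
      public static void main(String[] args)
      {
        // Nulls sort before any non-null String, otherwise natural order.
        Comparator<String> nullsFirst =
            Comparator.nullsFirst(Comparator.naturalOrder());

        System.out.println(nullsFirst.compare(null, null)); // 0
        System.out.println(nullsFirst.compare(null, "a"));  // negative
        System.out.println(nullsFirst.compare("a", null));  // positive
        System.out.println(nullsFirst.compare("a", "b"));   // negative
      }
    }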
@@ -602,7 +593,6 @@ public class IncrementalIndex implements Iterable<Row>
 
   static class DimDim
   {
-    public static final String NULL_STRING = "\u0000";
     private final Map<String, String> poorMansInterning = Maps.newConcurrentMap();
     private final Map<String, Integer> falseIds;
     private final Map<Integer, String> falseIdsReverse;
@@ -615,32 +605,19 @@ public class IncrementalIndex implements Iterable<Row>
       falseIdsReverse = biMap.inverse();
     }
 
-    public boolean contains(@Nullable String value)
+    public String get(String value)
     {
-      return poorMansInterning.containsKey(value == null ? NULL_STRING : value);
+      return value == null ? null : poorMansInterning.get(value);
     }
 
-    public String get(@Nullable String value)
+    public int getId(String value)
     {
-      final String retVal;
-      if (value == null) {
-        retVal = poorMansInterning.get(NULL_STRING);
-      } else {
-        retVal = poorMansInterning.get(value);
-      }
-      return retVal == null ? null : (retVal.equals(NULL_STRING) ? null : retVal);
+      return falseIds.get(value);
     }
 
-    public int getId(@Nullable String value)
-    {
-      return value == null ? falseIds.get(NULL_STRING) : falseIds.get(value);
-    }
-
-    @Nullable
     public String getValue(int id)
     {
-      final String value = falseIdsReverse.get(id);
-      return value.equals(NULL_STRING) ? null : value;
+      return falseIdsReverse.get(id);
     }
 
     public int size()
@@ -648,30 +625,27 @@ public class IncrementalIndex implements Iterable<Row>
       return poorMansInterning.size();
     }
 
-    public synchronized void add(@Nullable String value)
+    public Set<String> keySet()
     {
-      if (value == null) {
-        value = NULL_STRING;
+      return poorMansInterning.keySet();
     }
+
+    public synchronized void add(String value)
+    {
       poorMansInterning.put(value, value);
       falseIds.put(value, falseIds.size());
     }
 
-    public int getSortedId(@Nullable String value)
+    public int getSortedId(String value)
     {
       assertSorted();
-      if (value == null) {
-        value = NULL_STRING;
-      }
       return Arrays.binarySearch(sortedVals, value);
     }
 
-    @Nullable
     public String getSortedValue(int index)
     {
       assertSorted();
-      final String sortedVal = sortedVals[index];
-      return sortedVal.equals(NULL_STRING) ? null : sortedVal;
+      return sortedVals[index];
     }
 
     public void sort()
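Why NULL_STRING existed at all: Maps.newConcurrentMap() returns a ConcurrentHashMap, which rejects null keys and values, so the reverted code interned a null dimension value under the "\u0000" sentinel and translated it back to null on every read. A compact sketch of that trick (illustrative only; the real DimDim also maintains the falseIds id mappings):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class SentinelInterning
    {
      private static final String NULL_STRING = "\u0000";
      private final ConcurrentMap<String, String> interned = new ConcurrentHashMap<>();

      public void add(String value)
      {
        // ConcurrentHashMap forbids null keys, so smuggle null in
        // under the reserved sentinel.
        String key = (value == null) ? NULL_STRING : value;
        interned.put(key, key);
      }

      public String get(String value)
      {
        String retVal = interned.get(value == null ? NULL_STRING : value);
        // Translate the sentinel back to a real null on the way out.
        return NULL_STRING.equals(retVal) ? null : retVal;
      }

      public static void main(String[] args)
      {
        SentinelInterning dims = new SentinelInterning();
        dims.add(null);
        dims.add("spot");
        System.out.println(dims.get("spot")); // spot
        System.out.println(dims.get(null));   // null (stored as "\u0000")
      }
    }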
SpatialDimensionRowFormatter.java

@@ -189,7 +189,7 @@ public class SpatialDimensionRowFormatter
         return false;
       }
       for (String dimVal : dimVals) {
-        if (dimVal == null || Floats.tryParse(dimVal) == null) {
+        if (Floats.tryParse(dimVal) == null) {
           return false;
         }
       }
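The deleted dimVal == null short-circuit mattered because Guava's Floats.tryParse returns null only for a malformed string; handing it a null reference throws (a NullPointerException, by my reading of Guava), so the guard protected multi-value rows that contain nulls. A small sketch, assuming Guava on the classpath:

    import com.google.common.primitives.Floats;

    public class TryParseSketch
    {
      public static void main(String[] args)
      {
        System.out.println(Floats.tryParse("3.5")); // 3.5
        System.out.println(Floats.tryParse("abc")); // null (malformed)

        String dimVal = null;
        // Pre-revert guard: test for null before calling tryParse,
        // so a null element never reaches it.
        System.out.println(dimVal == null || Floats.tryParse(dimVal) == null);
      }
    }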
GroupByQueryRunnerTest.java

@@ -47,7 +47,6 @@ import io.druid.query.aggregation.MaxAggregatorFactory;
 import io.druid.query.dimension.DefaultDimensionSpec;
 import io.druid.query.dimension.DimensionSpec;
 import io.druid.query.dimension.ExtractionDimensionSpec;
-import io.druid.query.extraction.DimExtractionFn;
 import io.druid.query.extraction.RegexDimExtractionFn;
 import io.druid.query.filter.JavaScriptDimFilter;
 import io.druid.query.filter.RegexDimFilter;
@@ -211,11 +210,9 @@ public class GroupByQueryRunnerTest
     TestHelper.assertExpectedObjects(expectedResults, results, "");
   }
 
-
   @Test
   public void testGroupByWithDimExtractionFn()
   {
-    final DimExtractionFn fn1 = new RegexDimExtractionFn("(\\w{1})");
     GroupByQuery query = GroupByQuery
         .builder()
         .setDataSource(QueryRunnerTestHelper.dataSource)
@@ -225,20 +222,7 @@ public class GroupByQueryRunnerTest
             new ExtractionDimensionSpec(
                 "quality",
                 "alias",
-                new DimExtractionFn()
-                {
-                  @Override
-                  public byte[] getCacheKey()
-                  {
-                    return new byte[]{(byte)0xFF};
-                  }
-
-                  @Override
-                  public String apply(String dimValue)
-                  {
-                    return dimValue.equals("mezzanine") ? null : fn1.apply(dimValue);
-                  }
-                }
+                new RegexDimExtractionFn("(\\w{1})")
             )
         )
     )
@@ -252,20 +236,20 @@ public class GroupByQueryRunnerTest
         .build();
 
     List<Row> expectedResults = Arrays.asList(
-        createExpectedRow("2011-04-01", "alias", null, "rows", 3L, "idx", 2870L),
         createExpectedRow("2011-04-01", "alias", "a", "rows", 1L, "idx", 135L),
         createExpectedRow("2011-04-01", "alias", "b", "rows", 1L, "idx", 118L),
         createExpectedRow("2011-04-01", "alias", "e", "rows", 1L, "idx", 158L),
         createExpectedRow("2011-04-01", "alias", "h", "rows", 1L, "idx", 120L),
+        createExpectedRow("2011-04-01", "alias", "m", "rows", 3L, "idx", 2870L),
         createExpectedRow("2011-04-01", "alias", "n", "rows", 1L, "idx", 121L),
         createExpectedRow("2011-04-01", "alias", "p", "rows", 3L, "idx", 2900L),
         createExpectedRow("2011-04-01", "alias", "t", "rows", 2L, "idx", 197L),
 
-        createExpectedRow("2011-04-02", "alias", null, "rows", 3L, "idx", 2447L),
         createExpectedRow("2011-04-02", "alias", "a", "rows", 1L, "idx", 147L),
         createExpectedRow("2011-04-02", "alias", "b", "rows", 1L, "idx", 112L),
         createExpectedRow("2011-04-02", "alias", "e", "rows", 1L, "idx", 166L),
         createExpectedRow("2011-04-02", "alias", "h", "rows", 1L, "idx", 113L),
+        createExpectedRow("2011-04-02", "alias", "m", "rows", 3L, "idx", 2447L),
         createExpectedRow("2011-04-02", "alias", "n", "rows", 1L, "idx", 114L),
         createExpectedRow("2011-04-02", "alias", "p", "rows", 3L, "idx", 2505L),
         createExpectedRow("2011-04-02", "alias", "t", "rows", 2L, "idx", 223L)
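The test now relies on RegexDimExtractionFn("(\\w{1})") alone, which buckets each quality value by its first word character ("automotive" lands in "a", "mezzanine" in "m" — hence the restored "m" rows above in place of the null rows). A sketch of that extraction with plain java.util.regex, assuming the no-match fallback passes the value through unchanged:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class RegexExtractionSketch
    {
      public static void main(String[] args)
      {
        // Keep capture group 1 of the first match, as the extraction
        // fn does; fall back to the original value if nothing matches.
        Pattern p = Pattern.compile("(\\w{1})");
        for (String quality : new String[]{"automotive", "mezzanine", "premium"}) {
          Matcher m = p.matcher(quality);
          String key = m.find() ? m.group(1) : quality;
          System.out.println(quality + " -> " + key); // automotive -> a, ...
        }
      }
    }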
TopNQueryRunnerTest.java

@@ -23,7 +23,6 @@ import com.google.common.base.Supplier;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.metamx.common.guava.Sequences;
 import io.druid.collections.StupidPool;
 import io.druid.query.BySegmentResultValueClass;
@@ -37,7 +36,6 @@ import io.druid.query.aggregation.MaxAggregatorFactory;
 import io.druid.query.aggregation.MinAggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
 import io.druid.query.dimension.ExtractionDimensionSpec;
-import io.druid.query.extraction.DimExtractionFn;
 import io.druid.query.extraction.RegexDimExtractionFn;
 import io.druid.query.filter.AndDimFilter;
 import io.druid.query.filter.DimFilter;
@@ -54,7 +52,6 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
@@ -1132,74 +1129,6 @@ public class TopNQueryRunnerTest
     TestHelper.assertExpectedResults(expectedResults, runner.run(query));
   }
 
-  @Test
-  public void testTopNDimExtractionNulls()
-  {
-    TopNQuery query = new TopNQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .dimension(
-            new ExtractionDimensionSpec(
-                providerDimension, providerDimension, new DimExtractionFn()
-            {
-              @Override
-              public byte[] getCacheKey()
-              {
-                return new byte[]{(byte)0xFF};
-              }
-
-              @Override
-              public String apply(String dimValue)
-              {
-                return dimValue.equals("total_market") ? null : dimValue;
-              }
-            })
-        )
-        .metric("rows")
-        .threshold(4)
-        .intervals(QueryRunnerTestHelper.firstToThird)
-        .aggregators(QueryRunnerTestHelper.commonAggregators)
-        .postAggregators(Arrays.<PostAggregator>asList(QueryRunnerTestHelper.addRowsIndexConstant))
-        .build();
-
-    Map<String, Object> nullValue = Maps.newHashMap();
-    nullValue.put(providerDimension, null);
-    nullValue.putAll(ImmutableMap.of(
-        "rows", 4L,
-        "index", 5351.814697265625D,
-        "addRowsIndexConstant", 5356.814697265625D,
-        "uniques", QueryRunnerTestHelper.UNIQUES_2
-    ));
-
-    List<Result<TopNResultValue>> expectedResults = Arrays.asList(
-        new Result<>(
-            new DateTime("2011-04-01T00:00:00.000Z"),
-            new TopNResultValue(
-                Arrays.<Map<String, Object>>asList(
-                    ImmutableMap.<String, Object>of(
-                        providerDimension, "spot",
-                        "rows", 18L,
-                        "index", 2231.8768157958984D,
-                        "addRowsIndexConstant", 2250.8768157958984D,
-                        "uniques", QueryRunnerTestHelper.UNIQUES_9
-                    ),
-                    nullValue
-                    ,
-                    ImmutableMap.<String, Object>of(
-                        providerDimension, "upfront",
-                        "rows", 4L,
-                        "index", 4875.669677734375D,
-                        "addRowsIndexConstant", 4880.669677734375D,
-                        "uniques", QueryRunnerTestHelper.UNIQUES_2
-                    )
-                )
-            )
-        )
-    );
-
-    TestHelper.assertExpectedResults(expectedResults, runner.run(query));
-  }
-
   @Test
   public void testInvertedTopNQuery()
   {