mirror of https://github.com/apache/druid.git

Merge pull request #738 from metamx/dim-ex

Have better dimension exclusions

commit a73670ee7b

2 changed files: pom.xml, io/druid/segment/indexing/DataSchema.java
pom.xml
@@ -41,7 +41,7 @@
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <metamx.java-util.version>0.26.6</metamx.java-util.version>
         <apache.curator.version>2.6.0</apache.curator.version>
-        <druid.api.version>0.2.9</druid.api.version>
+        <druid.api.version>0.2.8</druid.api.version>
     </properties>

     <modules>
io/druid/segment/indexing/DataSchema.java
@@ -21,11 +21,14 @@ package io.druid.segment.indexing;

 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.collect.Sets;
 import io.druid.data.input.impl.InputRowParser;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.segment.indexing.granularity.GranularitySpec;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;

+import java.util.Set;
+
 /**
  */
 public class DataSchema
@@ -44,7 +47,31 @@ public class DataSchema
   )
   {
     this.dataSource = dataSource;
-    this.parser = parser;
+
+    final Set<String> dimensionExclusions = Sets.newHashSet();
+    for (AggregatorFactory aggregator : aggregators) {
+      dimensionExclusions.add(aggregator.getName());
+    }
+    if (parser != null && parser.getParseSpec() != null) {
+      if (parser.getParseSpec().getTimestampSpec() != null) {
+        dimensionExclusions.add(parser.getParseSpec().getTimestampSpec().getTimestampColumn());
+      }
+      if (parser.getParseSpec().getDimensionsSpec() != null) {
+        this.parser = parser.withParseSpec(
+            parser.getParseSpec()
+                  .withDimensionsSpec(
+                      parser.getParseSpec()
+                            .getDimensionsSpec()
+                            .withDimensionExclusions(dimensionExclusions)
+                  )
+        );
+      } else {
+        this.parser = parser;
+      }
+    } else {
+      this.parser = parser;
+    }
+
     this.aggregators = aggregators;
     this.granularitySpec = granularitySpec == null
                            ? new UniformGranularitySpec(null, null, null, null)
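
The constructor logic added above is the substance of the commit: every aggregator (metric) name and the timestamp column are folded into the parse spec's dimension exclusions, so they are never picked up as dimensions when dimensions are auto-discovered. The following is a minimal, self-contained sketch of that rule in plain Java, with no Druid classes and made-up column and metric names, just to show what ends up excluded and what remains eligible as a dimension.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class DimensionExclusionSketch
{
  public static void main(String[] args)
  {
    // Hypothetical inputs: aggregator output names and the timestamp column
    // from an ingestion spec (all names here are made up for illustration).
    List<String> metricNames = Arrays.asList("count", "revenue_sum");
    String timestampColumn = "timestamp";
    List<String> inputColumns = Arrays.asList(
        "timestamp", "page", "language", "user", "count", "revenue_sum"
    );

    // Mirror of the rule in the constructor: exclude every metric name,
    // then exclude the timestamp column as well.
    Set<String> dimensionExclusions = new LinkedHashSet<>(metricNames);
    dimensionExclusions.add(timestampColumn);

    // Columns that survive the exclusions are the ones still eligible
    // to be treated as dimensions.
    List<String> dimensions = new ArrayList<>();
    for (String column : inputColumns) {
      if (!dimensionExclusions.contains(column)) {
        dimensions.add(column);
      }
    }

    System.out.println("exclusions: " + dimensionExclusions); // [count, revenue_sum, timestamp]
    System.out.println("dimensions: " + dimensions);          // [page, language, user]
  }
}

Note that in the diff itself the parser is only rewritten when both a parse spec and a dimensions spec are present; in every other case the constructor keeps the supplied parser unchanged.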