verify no duplicate aggregator names in DataSchema (#3917)

This commit is contained in:
Himanshu 2017-02-08 18:12:07 -06:00 committed by Slim
parent 9191588656
commit e08cd0066b
2 changed files with 40 additions and 2 deletions

View File

@ -26,7 +26,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.InputRowParser;
import io.druid.data.input.impl.TimestampSpec; import io.druid.data.input.impl.TimestampSpec;
@ -37,6 +36,7 @@ import io.druid.segment.indexing.granularity.GranularitySpec;
import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.segment.indexing.granularity.UniformGranularitySpec;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@ -66,9 +66,18 @@ public class DataSchema
this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource cannot be null. Please provide a dataSource."); this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource cannot be null. Please provide a dataSource.");
this.parser = parser; this.parser = parser;
if (aggregators.length == 0) { if (aggregators == null || aggregators.length == 0) {
log.warn("No metricsSpec has been specified. Are you sure this is what you want?"); log.warn("No metricsSpec has been specified. Are you sure this is what you want?");
} else {
//validate for no duplication
Set<String> names = new HashSet<>();
for (AggregatorFactory factory : aggregators) {
if (!names.add(factory.getName())) {
throw new IAE("duplicate aggregators found with name [%s].", factory.getName());
}
}
} }
this.aggregators = aggregators; this.aggregators = aggregators;
if (granularitySpec == null) { if (granularitySpec == null) {

View File

@ -144,6 +144,35 @@ public class DataSchemaTest
schema.getParser(); schema.getParser();
} }
@Test(expected = IAE.class)
public void testDuplicateAggregators() throws Exception
{
  // Minimal parser spec; its contents are irrelevant to this test.
  Map<String, Object> parserMap = jsonMapper.convertValue(
      new StringInputRowParser(
          new JSONParseSpec(
              new TimestampSpec("time", "auto", null),
              new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time")), ImmutableList.of("dimC"), null),
              null,
              null
          ),
          null
      ),
      new TypeReference<Map<String, Object>>() {}
  );

  // Two of the three aggregators share the name "metric1"; constructing the
  // DataSchema must fail with an IAE (asserted via the expected attribute above).
  DataSchema schema = new DataSchema(
      "test",
      parserMap,
      new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("metric1", "col1"),
          new DoubleSumAggregatorFactory("metric2", "col2"),
          new DoubleSumAggregatorFactory("metric1", "col3"),
      },
      new ArbitraryGranularitySpec(QueryGranularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
      jsonMapper
  );

  // Not reached when validation works; kept to mirror sibling tests' shape.
  schema.getParser();
}
@Test @Test
public void testSerdeWithInvalidParserMap() throws Exception public void testSerdeWithInvalidParserMap() throws Exception
{ {