mirror of https://github.com/apache/druid.git
Merge pull request #1755 from metamx/update-druid-api
update druid-api for timestamp parsing speedup
commit 34a8fbcd68

pom.xml | 4
pom.xml

@@ -65,11 +65,11 @@
     </scm>

     <properties>
-        <metamx.java-util.version>0.27.2</metamx.java-util.version>
+        <metamx.java-util.version>0.27.3</metamx.java-util.version>
         <apache.curator.version>2.8.0</apache.curator.version>
         <jetty.version>9.2.13.v20150730</jetty.version>
         <jersey.version>1.19</jersey.version>
-        <druid.api.version>0.3.12</druid.api.version>
+        <druid.api.version>0.3.13</druid.api.version>
         <!-- Watch out for Hadoop compatibility when updating to >= 2.5; see https://github.com/druid-io/druid/pull/1669 -->
         <jackson.version>2.4.6</jackson.version>
         <log4j.version>2.3</log4j.version>
DataSchemaTest.java

@@ -17,6 +17,7 @@

 package io.druid.segment.indexing;

+import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
@@ -56,7 +57,7 @@ public class DataSchemaTest
                 new TimestampSpec("time", "auto", null),
                 new DimensionsSpec(ImmutableList.of("dimB", "dimA"), null, null)
             )
-        ), Map.class
+        ), new TypeReference<Map<String, Object>>() {}
     );

     DataSchema schema = new DataSchema(
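Note: the recurring replacement of "), Map.class" with "), new TypeReference<Map<String, Object>>() {}" in this test is the standard Jackson idiom for keeping generic type information past erasure. A minimal sketch of the difference, using plain Jackson only; the class name ConvertValueSketch and the sample input map are illustrative and not part of the Druid test:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Collections;
import java.util.Map;

public class ConvertValueSketch
{
  public static void main(String[] args)
  {
    ObjectMapper mapper = new ObjectMapper();
    Object source = Collections.singletonMap("column", "time");

    // With a plain Class token the type parameters are erased: convertValue returns
    // a raw Map, and assigning it to Map<String, Object> is an unchecked conversion.
    @SuppressWarnings("unchecked")
    Map<String, Object> raw = mapper.convertValue(source, Map.class);

    // The anonymous TypeReference subclass captures Map<String, Object> at compile
    // time, so Jackson produces exactly that type and no unchecked step is needed.
    Map<String, Object> typed = mapper.convertValue(
        source,
        new TypeReference<Map<String, Object>>() {}
    );

    System.out.println(raw.equals(typed)); // true
  }
}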
@@ -85,7 +86,7 @@ public class DataSchemaTest
                 new TimestampSpec("time", "auto", null),
                 new DimensionsSpec(ImmutableList.of("time", "dimA", "dimB", "col2"), ImmutableList.of("dimC"), null)
             )
-        ), Map.class
+        ), new TypeReference<Map<String, Object>>() {}
     );

     DataSchema schema = new DataSchema(
@@ -114,7 +115,7 @@ public class DataSchemaTest
                 new TimestampSpec("time", "auto", null),
                 new DimensionsSpec(ImmutableList.of("time", "dimA", "dimB", "metric1"), ImmutableList.of("dimC"), null)
             )
-        ), Map.class
+        ), new TypeReference<Map<String, Object>>() {}
     );

     DataSchema schema = new DataSchema(
@@ -170,7 +171,8 @@ public class DataSchemaTest
         + "\"parseSpec\":{"
         + "\"format\":\"json\","
         + "\"timestampSpec\":{\"column\":\"xXx\", \"format\": \"auto\", \"missingValue\": null},"
-        + "\"dimensionsSpec\":{\"dimensions\":[], \"dimensionExclusions\":[], \"spatialDimensions\":[]}}"
+        + "\"dimensionsSpec\":{\"dimensions\":[], \"dimensionExclusions\":[], \"spatialDimensions\":[]}},"
+        + "\"encoding\":\"UTF-8\""
         + "},"
         + "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}],"
         + "\"granularitySpec\":{"
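Note: in the expected JSON above, the parseSpec object now ends with a comma because an "encoding":"UTF-8" entry follows it inside the enclosing parser block. A small sketch of reading that shape back, using plain Jackson and only the field names visible in the hunk (EncodingFieldSketch is an illustrative name):

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Map;

public class EncodingFieldSketch
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();

    // The enclosing block from the expected JSON, trimmed to the fields visible in
    // the hunk: parseSpec is now followed by a comma and an "encoding" entry.
    String parserJson = "{"
        + "\"parseSpec\":{\"format\":\"json\"},"
        + "\"encoding\":\"UTF-8\""
        + "}";

    Map<String, Object> parser = mapper.readValue(
        parserJson,
        new TypeReference<Map<String, Object>>() {}
    );

    System.out.println(parser.get("encoding")); // UTF-8
  }
}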
@@ -188,13 +190,13 @@ public class DataSchemaTest
     Assert.assertEquals(
         new DataSchema(
             "test",
-            jsonMapper.convertValue(
+            jsonMapper.<Map<String, Object>>convertValue(
                 new StringInputRowParser(
                     new JSONParseSpec(
                         new TimestampSpec("xXx", null, null),
                         new DimensionsSpec(null, null, null)
                     )
-                ), Map.class
+                ), new TypeReference<Map<String, Object>>() {}
             ),
             new AggregatorFactory[]{
                 new DoubleSumAggregatorFactory("metric1", "col1")
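Note: besides the TypeReference swap, this last hunk adds an explicit type witness, jsonMapper.<Map<String, Object>>convertValue(...). Because convertValue(Object, TypeReference<?>) declares its result as an unbound <T>, and here the call is passed straight into the DataSchema constructor rather than assigned to a typed variable, the witness pins the result to Map<String, Object> without relying on target-type inference. A small sketch of the same pattern, assuming nothing beyond plain Jackson (takesTypedMap is a hypothetical stand-in for the constructor parameter, not a Druid API):

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Collections;
import java.util.Map;

public class TypeWitnessSketch
{
  // Hypothetical stand-in for the DataSchema constructor parameter: something that
  // wants a typed map, not a raw one.
  static void takesTypedMap(Map<String, Object> parser)
  {
    System.out.println(parser);
  }

  public static void main(String[] args)
  {
    ObjectMapper mapper = new ObjectMapper();
    Object source = Collections.singletonMap("format", "json");

    // The explicit <Map<String, Object>> witness fixes the generic result type of
    // convertValue even though the call is used directly as an argument.
    takesTypedMap(
        mapper.<Map<String, Object>>convertValue(
            source,
            new TypeReference<Map<String, Object>>() {}
        )
    );
  }
}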