Fix DataSourceMetadataResultValue serde.

This commit is contained in:
Gian Merlino 2015-02-03 17:39:42 -08:00
parent 1298b4abd3
commit 085ad8d345
3 changed files with 52 additions and 25 deletions

View File

@@ -22,7 +22,6 @@ package io.druid.query.datasourcemetadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.druid.common.utils.JodaUtils;
import io.druid.query.BaseQuery;
import io.druid.query.DataSource;
@@ -45,9 +44,6 @@ public class DataSourceMetadataQuery extends BaseQuery<Result<DataSourceMetadata
JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT
);
public static String MAX_INGESTED_EVENT_TIME = "maxIngestedEventTime";
@JsonCreator
public DataSourceMetadataQuery(
@JsonProperty("dataSource") DataSource dataSource,

View File

@@ -20,7 +20,7 @@
package io.druid.query.datasourcemetadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.joda.time.DateTime;
/**
@@ -31,13 +31,13 @@ public class DataSourceMetadataResultValue
@JsonCreator
public DataSourceMetadataResultValue(
DateTime maxIngestedEventTime
@JsonProperty("maxIngestedEventTime") DateTime maxIngestedEventTime
)
{
this.maxIngestedEventTime = maxIngestedEventTime;
}
@JsonValue
@JsonProperty
public DateTime getMaxIngestedEventTime()
{
return maxIngestedEventTime;

View File

@@ -21,6 +21,7 @@
package io.druid.query.datasourcemetadata;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
@@ -40,7 +41,6 @@ import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.timeboundary.TimeBoundaryQueryQueryToolChest;
import io.druid.segment.IncrementalIndexSegment;
import io.druid.segment.TestIndex;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.OnheapIncrementalIndex;
import io.druid.timeline.LogicalSegment;
@@ -75,20 +75,20 @@ public class DataSourceMetadataQueryTest
public void testContextSerde() throws Exception
{
final DataSourceMetadataQuery query = Druids.newDataSourceMetadataQueryBuilder()
.dataSource("foo")
.intervals("2013/2014")
.context(
ImmutableMap.<String, Object>of(
"priority",
1,
"useCache",
true,
"populateCache",
true,
"finalize",
true
)
).build();
.dataSource("foo")
.intervals("2013/2014")
.context(
ImmutableMap.<String, Object>of(
"priority",
1,
"useCache",
true,
"populateCache",
true,
"finalize",
true
)
).build();
final ObjectMapper mapper = new DefaultObjectMapper();
@@ -113,7 +113,8 @@ public class DataSourceMetadataQueryTest
{
final IncrementalIndex rtIndex = new OnheapIncrementalIndex(
0L, QueryGranularity.NONE, new AggregatorFactory[]{new CountAggregatorFactory("count")}, 1000
);;
);
;
final QueryRunner runner = QueryRunnerTestHelper.makeQueryRunner(
(QueryRunnerFactory) new DataSourceMetadataQueryRunnerFactory(
QueryRunnerTestHelper.NOOP_QUERYWATCHER
@@ -128,8 +129,8 @@ public class DataSourceMetadataQueryTest
)
);
DataSourceMetadataQuery dataSourceMetadataQuery = Druids.newDataSourceMetadataQueryBuilder()
.dataSource("testing")
.build();
.dataSource("testing")
.build();
Map<String, Object> context = new MapMaker().makeMap();
context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList());
Iterable<Result<DataSourceMetadataResultValue>> results = Sequences.toList(
@@ -209,4 +210,34 @@ public class DataSourceMetadataQueryTest
}
}
@Test
public void testResultSerialization()
{
  // Serializing a result value should produce a single-entry map keyed by
  // "maxIngestedEventTime", with the timestamp rendered as an ISO-8601 UTC string.
  final DateTime eventTime = new DateTime("2000-01-01T00Z");
  final Map<String, Object> serialized = new DefaultObjectMapper().convertValue(
      new DataSourceMetadataResultValue(eventTime),
      new TypeReference<Map<String, Object>>()
      {
      }
  );
  final Map<String, Object> expected = ImmutableMap.<String, Object>of(
      "maxIngestedEventTime",
      "2000-01-01T00:00:00.000Z"
  );
  Assert.assertEquals(expected, serialized);
}
@Test
public void testResultDeserialization()
{
  // Deserializing the serialized form should reproduce the original instant.
  final Map<String, Object> resultValueMap = ImmutableMap.<String, Object>of(
      "maxIngestedEventTime",
      "2000-01-01T00:00:00.000Z"
  );
  final DataSourceMetadataResultValue resultValue = new DefaultObjectMapper().convertValue(
      resultValueMap,
      DataSourceMetadataResultValue.class
  );
  // Compare instants (millis) rather than DateTime objects: Joda's
  // DateTime.equals() also requires an equal chronology/time zone, so comparing
  // a default-zone DateTime ("2000") against the UTC-zoned deserialized value
  // would fail on any JVM whose default time zone is not UTC.
  Assert.assertEquals(
      new DateTime("2000-01-01T00Z").getMillis(),
      resultValue.getMaxIngestedEventTime().getMillis()
  );
}
}