fix serialisation of SegmentMetadataQuery

nishantmonu51 2014-03-21 14:11:26 +05:30
parent d7cd733870
commit dc0e7f404a
4 changed files with 72 additions and 5 deletions

View File

@@ -34,4 +34,16 @@ public class AllColumnIncluderator implements ColumnIncluderator
{
return ALL_CACHE_PREFIX;
}
+ @Override
+ public boolean equals(Object obj)
+ {
+ return obj instanceof AllColumnIncluderator;
+ }
+ @Override
+ public int hashCode()
+ {
+ return AllColumnIncluderator.class.hashCode();
+ }
}
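The equals/hashCode added above make every AllColumnIncluderator interchangeable, presumably so that a freshly deserialized query (whose toInclude defaults to a new AllColumnIncluderator) still compares equal to the original in the serde test added below. A minimal sketch of that semantics; the class name is illustrative, not part of the commit:

import io.druid.query.metadata.metadata.AllColumnIncluderator;

public class AllColumnIncluderatorEqualityCheck
{
  public static void main(String[] args)
  {
    // Any two instances are value-equal and hash identically, so two
    // independently deserialized queries with default toInclude still match.
    AllColumnIncluderator a = new AllColumnIncluderator();
    AllColumnIncluderator b = new AllColumnIncluderator();
    System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // expected: true
  }
}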

View File

@@ -21,7 +21,9 @@ package io.druid.query.metadata.metadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
+ import com.google.common.base.Preconditions;
import io.druid.query.BaseQuery;
+ import io.druid.query.DataSource;
import io.druid.query.Query;
import io.druid.query.TableDataSource;
import io.druid.query.spec.QuerySegmentSpec;
@@ -36,17 +38,18 @@ public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
@JsonCreator
public SegmentMetadataQuery(
@JsonProperty("dataSource") String dataSource,
@JsonProperty("dataSource") DataSource dataSource,
@JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
@JsonProperty("toInclude") ColumnIncluderator toInclude,
@JsonProperty("merge") Boolean merge,
@JsonProperty("context") Map<String, String> context
)
{
- super(new TableDataSource(dataSource), querySegmentSpec, context);
+ super(dataSource, querySegmentSpec, context);
this.toInclude = toInclude == null ? new AllColumnIncluderator() : toInclude;
this.merge = merge == null ? false : merge;
+ Preconditions.checkArgument(dataSource instanceof TableDataSource, "SegmentMetadataQuery only supports table datasource");
}
@JsonProperty
@@ -77,7 +80,7 @@ public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
public Query<SegmentAnalysis> withOverriddenContext(Map<String, String> contextOverride)
{
return new SegmentMetadataQuery(
- ((TableDataSource)getDataSource()).getName(),
+ getDataSource(),
getQuerySegmentSpec(), toInclude, merge, computeOverridenContext(contextOverride)
);
}
@@ -86,7 +89,7 @@ public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
public Query<SegmentAnalysis> withQuerySegmentSpec(QuerySegmentSpec spec)
{
return new SegmentMetadataQuery(
- ((TableDataSource)getDataSource()).getName(),
+ getDataSource(),
spec, toInclude, merge, getContext());
}
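After this change the query is built from a DataSource rather than a raw name, and the new precondition limits it to table datasources. A minimal sketch of constructing the query programmatically, assuming only the constructor signature shown above; the "wikipedia" name and the wrapper class are placeholders:

import io.druid.query.TableDataSource;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.spec.QuerySegmentSpecs;

public class SegmentMetadataQueryConstructionSketch
{
  public static void main(String[] args)
  {
    SegmentMetadataQuery query = new SegmentMetadataQuery(
        new TableDataSource("wikipedia"),       // must be a table datasource, or the precondition throws
        QuerySegmentSpecs.create("2011/2012"),  // intervals to analyze
        null,                                   // toInclude: defaults to AllColumnIncluderator
        null,                                   // merge: defaults to false
        null                                    // context
    );
    System.out.println(query.getDataSource());
  }
}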

View File

@@ -21,6 +21,7 @@ package io.druid.query.metadata;
import com.google.common.collect.Lists;
import com.metamx.common.guava.Sequences;
+ import io.druid.query.LegacyDataSource;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryRunnerTestHelper;
@@ -98,7 +99,7 @@ public class SegmentAnalyzerTest
);
final SegmentMetadataQuery query = new SegmentMetadataQuery(
"test", QuerySegmentSpecs.create("2011/2012"), null, null, null
new LegacyDataSource("test"), QuerySegmentSpecs.create("2011/2012"), null, null, null
);
return Sequences.toList(query.run(runner), Lists.<SegmentAnalysis>newArrayList());
}
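The test now passes a LegacyDataSource instead of a plain string. For the new precondition in SegmentMetadataQuery to accept it, LegacyDataSource has to be a TableDataSource subtype; a small sketch of that assumption (purely illustrative, not part of the commit):

import io.druid.query.DataSource;
import io.druid.query.LegacyDataSource;
import io.druid.query.TableDataSource;

public class LegacyDataSourceCheck
{
  public static void main(String[] args)
  {
    // The updated test above relies on this printing true, because
    // SegmentMetadataQuery now rejects anything that is not a TableDataSource.
    DataSource ds = new LegacyDataSource("test");
    System.out.println(ds instanceof TableDataSource);
  }
}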

View File

@@ -0,0 +1,51 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package io.druid.query.metadata;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.Query;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;
public class SegmentMetadataQueryTest
{
private ObjectMapper mapper = new DefaultObjectMapper();
@Test
public void testSerde() throws Exception
{
String queryStr = "{\n"
+ " \"queryType\":\"segmentMetadata\",\n"
+ " \"dataSource\":\"test_ds\",\n"
+ " \"intervals\":[\"2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z\"]\n"
+ "}";
Query query = mapper.readValue(queryStr, Query.class);
Assert.assertTrue(query instanceof SegmentMetadataQuery);
Assert.assertEquals("test_ds", query.getDataSource().getName());
Assert.assertEquals(new Interval("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"), query.getIntervals().get(0));
// test serialize and deserialize
Assert.assertEquals(query, mapper.readValue(mapper.writeValueAsString(query), Query.class));
}
}
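A standalone sketch of the same round trip outside JUnit, under the assumption that it mirrors the test above: the plain-string "dataSource" is read into a DataSource, and the final equality check only holds now that AllColumnIncluderator implements equals(). Class and variable names here are illustrative only:

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.Query;

public class SegmentMetadataQuerySerdeSketch
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new DefaultObjectMapper();
    String json = "{"
                + "\"queryType\":\"segmentMetadata\","
                + "\"dataSource\":\"test_ds\","
                + "\"intervals\":[\"2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z\"]"
                + "}";

    // Deserialize, re-serialize, and deserialize again; the two query objects
    // should be equal if serialisation of SegmentMetadataQuery is symmetric.
    Query<?> first = mapper.readValue(json, Query.class);
    Query<?> second = mapper.readValue(mapper.writeValueAsString(first), Query.class);
    System.out.println(first.equals(second)); // expected: true
  }
}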