Merge pull request #1712 from metamx/searchQueryDimExtraction

Add dimension extraction functionality to SearchQuery
This commit is contained in:
Xavier Léauté 2015-09-14 15:26:13 -07:00
commit 5f36e7a992
11 changed files with 360 additions and 73 deletions

View File

@ -17,17 +17,22 @@
package io.druid.query;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.filter.AndDimFilter;
import io.druid.query.filter.DimFilter;
import io.druid.query.filter.NoopDimFilter;
import io.druid.query.filter.NotDimFilter;
import io.druid.query.filter.OrDimFilter;
import io.druid.query.filter.SelectorDimFilter;
import io.druid.query.datasourcemetadata.DataSourceMetadataQuery;
import io.druid.query.metadata.metadata.ColumnIncluderator;
import io.druid.query.metadata.metadata.SegmentMetadataQuery;
import io.druid.query.search.SearchResultValue;
@ -44,6 +49,7 @@ import io.druid.query.timeseries.TimeseriesQuery;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Map;
@ -51,6 +57,16 @@ import java.util.Map;
*/
public class Druids
{
/**
 * Maps a plain dimension name to a {@link DefaultDimensionSpec} whose input
 * and output names are both the given string — i.e. the identity dimension
 * mapping used when a query supplies bare dimension names.
 */
public static final Function<String, DimensionSpec> DIMENSION_IDENTITY = new Function<String, DimensionSpec>()
{
  @Nullable
  @Override
  public DimensionSpec apply(String input)
  {
    // Same string for both dimension and output name: no renaming, no extraction.
    return new DefaultDimensionSpec(input, input);
  }
};
private Druids()
{
throw new AssertionError();
@ -506,7 +522,7 @@ public class Druids
private QueryGranularity granularity;
private int limit;
private QuerySegmentSpec querySegmentSpec;
private List<String> dimensions;
private List<DimensionSpec> dimensions;
private SearchQuerySpec querySpec;
private Map<String, Object> context;
@ -634,12 +650,24 @@ public class Druids
}
/**
 * Sets the search dimensions to the single given dimension name, wrapped as an
 * identity {@link DimensionSpec} (output name == input name). Replaces any
 * previously set dimensions.
 */
public SearchQueryBuilder dimensions(String d)
{
  dimensions = ImmutableList.of(DIMENSION_IDENTITY.apply(d));
  return this;
}
/**
 * Sets the search dimensions to the given dimension names, each wrapped as an
 * identity {@link DimensionSpec}. Replaces any previously set dimensions.
 */
public SearchQueryBuilder dimensions(Iterable<String> d)
{
  dimensions = ImmutableList.copyOf(Iterables.transform(d, DIMENSION_IDENTITY));
  return this;
}
/**
 * Sets the search dimensions to the single given {@link DimensionSpec},
 * replacing any previously set dimensions.
 */
public SearchQueryBuilder dimensions(DimensionSpec d)
{
  // Use an immutable single-element list for consistency with the other
  // dimensions() overloads (which build ImmutableList); the builder never
  // mutates this list after assignment.
  dimensions = ImmutableList.of(d);
  return this;
}
public SearchQueryBuilder dimensions(List<String> d)
public SearchQueryBuilder dimensions(List<DimensionSpec> d)
{
dimensions = d;
return this;

View File

@ -30,7 +30,8 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
@JsonSubTypes.Type(name = "searchQuery", value = SearchQuerySpecDimExtractionFn.class),
@JsonSubTypes.Type(name = "javascript", value = JavascriptExtractionFn.class),
@JsonSubTypes.Type(name = "timeFormat", value = TimeFormatExtractionFn.class),
@JsonSubTypes.Type(name = "lookup", value = LookupExtractionFn.class)
@JsonSubTypes.Type(name = "lookup", value = LookupExtractionFn.class),
@JsonSubTypes.Type(name = "identity", value = IdentityExtractionFn.class)
})
/**
* An ExtractionFn is a function that can be used to transform the values of a column (typically a dimension)

View File

@ -0,0 +1,62 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.extraction;
import com.google.common.base.Strings;
/**
 * An {@link ExtractionFn} that returns dimension values unchanged, except that
 * empty strings are normalized to null (matching Druid's treatment of missing
 * dimension values). The function is stateless, so all instances are
 * interchangeable; prefer {@link #getInstance()} over constructing new ones.
 */
public class IdentityExtractionFn implements ExtractionFn
{
  private static final byte CACHE_TYPE_ID = 0x6;

  // Stateless, so one shared instance suffices for all callers.
  private static final IdentityExtractionFn INSTANCE = new IdentityExtractionFn();

  public static IdentityExtractionFn getInstance()
  {
    return INSTANCE;
  }

  @Override
  public byte[] getCacheKey()
  {
    // A single type-id byte fully identifies this fn: it carries no parameters.
    return new byte[]{CACHE_TYPE_ID};
  }

  @Override
  public String apply(Object value)
  {
    // Null stays null; otherwise stringify and coerce "" to null.
    return value == null ? null : Strings.emptyToNull(value.toString());
  }

  @Override
  public String apply(String value)
  {
    return Strings.emptyToNull(value);
  }

  @Override
  public String apply(long value)
  {
    return Long.toString(value);
  }

  @Override
  public boolean preservesOrdering()
  {
    // Identity trivially preserves input ordering.
    return true;
  }

  @Override
  public ExtractionType getExtractionType()
  {
    return ExtractionType.ONE_TO_ONE;
  }

  @Override
  public boolean equals(Object o)
  {
    // All instances are stateless and behave identically, so any two compare
    // equal. This keeps value equality of DimensionSpecs/queries that embed
    // this fn working even when separate instances are constructed.
    return o != null && o.getClass() == getClass();
  }

  @Override
  public int hashCode()
  {
    return IdentityExtractionFn.class.hashCode();
  }

  @Override
  public String toString()
  {
    return "Identity";
  }
}

View File

@ -20,10 +20,10 @@ package io.druid.query.search;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import com.google.inject.Inject;
import com.metamx.common.IAE;
@ -32,19 +32,19 @@ import com.metamx.common.guava.MergeSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.common.StringUtils;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.collections.OrderedMergeSequence;
import io.druid.query.CacheStrategy;
import io.druid.query.DruidMetrics;
import io.druid.query.IntervalChunkingQueryRunnerDecorator;
import io.druid.query.Query;
import io.druid.query.DruidMetrics;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.Result;
import io.druid.query.ResultGranularTimestampComparator;
import io.druid.query.ResultMergeQueryRunner;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.filter.DimFilter;
import io.druid.query.search.search.SearchHit;
import io.druid.query.search.search.SearchQuery;
@ -53,9 +53,9 @@ import org.joda.time.DateTime;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*/
@ -152,16 +152,15 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
final byte[] querySpecBytes = query.getQuery().getCacheKey();
final byte[] granularityBytes = query.getGranularity().cacheKey();
final Set<String> dimensions = Sets.newTreeSet();
if (query.getDimensions() != null) {
dimensions.addAll(query.getDimensions());
}
final Collection<DimensionSpec> dimensions = query.getDimensions() == null
? ImmutableList.<DimensionSpec>of()
: query.getDimensions();
final byte[][] dimensionsBytes = new byte[dimensions.size()][];
int dimensionsBytesSize = 0;
int index = 0;
for (String dimension : dimensions) {
dimensionsBytes[index] = StringUtils.toUtf8(dimension);
for (DimensionSpec dimension : dimensions) {
dimensionsBytes[index] = dimension.getCacheKey();
dimensionsBytesSize += dimensionsBytes[index].length;
++index;
}
@ -215,7 +214,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
List<Object> result = (List<Object>) input;
return new Result<>(
new DateTime(((Number)result.get(0)).longValue()),
new DateTime(((Number) result.get(0)).longValue()),
new SearchResultValue(
Lists.transform(
(List) result.get(1),

View File

@ -17,8 +17,11 @@
package io.druid.query.search;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
@ -31,9 +34,13 @@ import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.emitter.EmittingLogger;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Druids;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.Result;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.extraction.IdentityExtractionFn;
import io.druid.query.filter.Filter;
import io.druid.query.search.search.SearchHit;
import io.druid.query.search.search.SearchQuery;
@ -49,6 +56,7 @@ import io.druid.segment.column.Column;
import io.druid.segment.data.IndexedInts;
import io.druid.segment.filter.Filters;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@ -59,7 +67,6 @@ import java.util.TreeSet;
public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
{
private static final EmittingLogger log = new EmittingLogger(SearchQueryRunner.class);
private final Segment segment;
public SearchQueryRunner(Segment segment)
@ -79,44 +86,50 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
final SearchQuery query = (SearchQuery) input;
final Filter filter = Filters.convertDimensionFilters(query.getDimensionsFilter());
final List<String> dimensions = query.getDimensions();
final List<DimensionSpec> dimensions = query.getDimensions();
final SearchQuerySpec searchQuerySpec = query.getQuery();
final int limit = query.getLimit();
// Closing this will cause segfaults in unit tests.
final QueryableIndex index = segment.asQueryableIndex();
if (index != null) {
final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());
Iterable<String> dimsToSearch;
Iterable<DimensionSpec> dimsToSearch;
if (dimensions == null || dimensions.isEmpty()) {
dimsToSearch = index.getAvailableDimensions();
dimsToSearch = Iterables.transform(index.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
} else {
dimsToSearch = dimensions;
}
BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();
final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();
final ImmutableBitmap baseFilter;
if (filter == null) {
baseFilter = bitmapFactory.complement(bitmapFactory.makeEmptyImmutableBitmap(), index.getNumRows());
} else {
ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(bitmapFactory, index);
final ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(bitmapFactory, index);
baseFilter = filter.getBitmapIndex(selector);
}
for (String dimension : dimsToSearch) {
final Column column = index.getColumn(dimension);
for (DimensionSpec dimension : dimsToSearch) {
final Column column = index.getColumn(dimension.getDimension());
if (column == null) {
continue;
}
final BitmapIndex bitmapIndex = column.getBitmapIndex();
ExtractionFn extractionFn = dimension.getExtractionFn();
if (extractionFn == null) {
extractionFn = new IdentityExtractionFn();
}
if (bitmapIndex != null) {
for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
String dimVal = Strings.nullToEmpty(bitmapIndex.getValue(i));
String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
if (searchQuerySpec.accept(dimVal) &&
bitmapFactory.intersection(Arrays.asList(baseFilter, bitmapIndex.getBitmap(i))).size() > 0) {
retVal.add(new SearchHit(dimension, dimVal));
retVal.add(new SearchHit(dimension.getOutputName(), dimVal));
if (retVal.size() >= limit) {
return makeReturnResult(limit, retVal);
}
@ -139,9 +152,9 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
);
}
final Iterable<String> dimsToSearch;
final Iterable<DimensionSpec> dimsToSearch;
if (dimensions == null || dimensions.isEmpty()) {
dimsToSearch = adapter.getAvailableDimensions();
dimsToSearch = Iterables.transform(adapter.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
} else {
dimsToSearch = dimensions;
}
@ -160,9 +173,11 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
}
Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
for (String dim : dimsToSearch) {
// switching to using DimensionSpec for search would allow the use of extractionFn here.
dimSelectors.put(dim, cursor.makeDimensionSelector(dim, null));
for (DimensionSpec dim : dimsToSearch) {
dimSelectors.put(
dim.getOutputName(),
cursor.makeDimensionSelector(dim.getDimension(), dim.getExtractionFn())
);
}
while (!cursor.isDone()) {

View File

@ -27,6 +27,7 @@ import io.druid.query.BaseQuery;
import io.druid.query.DataSource;
import io.druid.query.Query;
import io.druid.query.Result;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.filter.DimFilter;
import io.druid.query.search.SearchResultValue;
import io.druid.query.spec.QuerySegmentSpec;
@ -42,7 +43,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
private final DimFilter dimFilter;
private final SearchSortSpec sortSpec;
private final QueryGranularity granularity;
private final List<String> dimensions;
private final List<DimensionSpec> dimensions;
private final SearchQuerySpec querySpec;
private final int limit;
@ -53,7 +54,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
@JsonProperty("granularity") QueryGranularity granularity,
@JsonProperty("limit") int limit,
@JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
@JsonProperty("searchDimensions") List<String> dimensions,
@JsonProperty("searchDimensions") List<DimensionSpec> dimensions,
@JsonProperty("query") SearchQuerySpec querySpec,
@JsonProperty("sort") SearchSortSpec sortSpec,
@JsonProperty("context") Map<String, Object> context
@ -64,17 +65,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
this.sortSpec = sortSpec == null ? new LexicographicSearchSortSpec() : sortSpec;
this.granularity = granularity == null ? QueryGranularity.ALL : granularity;
this.limit = (limit == 0) ? 1000 : limit;
this.dimensions = (dimensions == null) ? null : Lists.transform(
dimensions,
new Function<String, String>()
{
@Override
public String apply(@Nullable String input)
{
return input;
}
}
);
this.dimensions = dimensions;
this.querySpec = querySpec;
Preconditions.checkNotNull(querySegmentSpec, "Must specify an interval");
@ -160,7 +151,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
}
@JsonProperty("searchDimensions")
public List<String> getDimensions()
public List<DimensionSpec> getDimensions()
{
return dimensions;
}

View File

@ -0,0 +1,45 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.dimension;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
public class DefaultDimensionSpecTest
{
  private final ObjectMapper mapper = new DefaultObjectMapper();

  @Test
  public void testEqualsSerde() throws IOException
  {
    // Round-trip a DefaultDimensionSpec through JSON and verify the
    // deserialized copy is indistinguishable from the original.
    final DimensionSpec original = new DefaultDimensionSpec("foo", "foo");
    final String serialized = mapper.writeValueAsString(original);
    final DimensionSpec roundTripped = mapper.readValue(serialized, DimensionSpec.class);

    Assert.assertEquals(original.toString(), roundTripped.toString());
    Assert.assertEquals(original, roundTripped);
    Assert.assertEquals(original.hashCode(), roundTripped.hashCode());
  }
}

View File

@ -0,0 +1,48 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.dimension;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.jackson.DefaultObjectMapper;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
public class LegacyDimensionSpecTest
{
  private final ObjectMapper mapper = new DefaultObjectMapper();

  @Test
  public void testEqualsSerde() throws IOException
  {
    // A bare JSON string inside a List<DimensionSpec> should deserialize as a
    // LegacyDimensionSpec whose dimension and output name both equal the string.
    final String dim = "testDimension";
    final String json = String.format("[\"%s\"]", dim);
    final List<DimensionSpec> specs = mapper.readValue(
        json,
        new TypeReference<List<DimensionSpec>>()
        {
        }
    );

    final DimensionSpec spec = specs.get(0);
    Assert.assertEquals(dim, spec.getDimension());
    Assert.assertEquals(dim, spec.getOutputName());
    Assert.assertEquals(new LegacyDimensionSpec(dim), spec);
  }
}

View File

@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableList;
import io.druid.granularity.QueryGranularity;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.CacheStrategy;
import io.druid.query.Druids;
import io.druid.query.Result;
import io.druid.query.TableDataSource;
import io.druid.query.search.search.FragmentSearchQuerySpec;
@ -55,7 +56,7 @@ public class SearchQueryQueryToolChestTest
)
)
),
ImmutableList.of("dim1"),
ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")),
new FragmentSearchQuerySpec(ImmutableList.of("a", "b")),
null,
null

View File

@ -17,7 +17,6 @@
package io.druid.query.search;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@ -27,7 +26,11 @@ import io.druid.query.Druids;
import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.Result;
import io.druid.query.dimension.ExtractionDimensionSpec;
import io.druid.query.extraction.LookupExtractionFn;
import io.druid.query.extraction.MapLookupExtractor;
import io.druid.query.filter.DimFilter;
import io.druid.query.filter.ExtractionDimFilter;
import io.druid.query.search.search.FragmentSearchQuerySpec;
import io.druid.query.search.search.SearchHit;
import io.druid.query.search.search.SearchQuery;
@ -38,10 +41,8 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -239,6 +240,44 @@ public class SearchQueryRunnerTest
);
}
@Test
public void testSearchWithExtractionFilter1()
{
  // The lookup remaps "automotive" to "automotive☃"; both the extraction
  // filter and the search dimension apply it, so the returned hits should
  // carry the remapped value.
  final String mapped = "automotive☃";

  final LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(
      new MapLookupExtractor(ImmutableMap.of("automotive", mapped)),
      true,
      null,
      true
  );

  final Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
  expectedResults.put(
      QueryRunnerTestHelper.qualityDimension,
      new HashSet<String>(Arrays.asList(mapped))
  );

  final SearchQuery query = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .filters(new ExtractionDimFilter(QueryRunnerTestHelper.qualityDimension, mapped, lookupExtractionFn, null))
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .dimensions(
          new ExtractionDimensionSpec(
              QueryRunnerTestHelper.qualityDimension,
              null,
              lookupExtractionFn,
              null
          )
      )
      .query("")
      .build();

  checkSearchQuery(query, expectedResults);
}
@Test
public void testSearchWithSingleFilter1()
{

View File

@ -22,6 +22,8 @@ import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.Druids;
import io.druid.query.Query;
import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.LegacyDimensionSpec;
import org.junit.Assert;
import org.junit.Test;
@ -35,11 +37,11 @@ public class SearchQueryTest
public void testQuerySerialization() throws IOException
{
Query query = Druids.newSearchQueryBuilder()
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.intervals(QueryRunnerTestHelper.fullOnInterval)
.query("a")
.build();
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.intervals(QueryRunnerTestHelper.fullOnInterval)
.query("a")
.build();
String json = jsonMapper.writeValueAsString(query);
Query serdeQuery = jsonMapper.readValue(json, Query.class);
@ -47,4 +49,60 @@ public class SearchQueryTest
Assert.assertEquals(query, serdeQuery);
}
@Test
public void testEquals()
{
  // Equality must hold across two independently constructed but identically
  // parameterized queries.
  Assert.assertEquals(makeEqualityTestQuery(), makeEqualityTestQuery());
}

// Builds the canonical query for testEquals; invoked twice so the assertion
// compares two distinct instances rather than one object with itself.
private static Query makeEqualityTestQuery()
{
  return Druids.newSearchQueryBuilder()
               .dataSource(QueryRunnerTestHelper.dataSource)
               .granularity(QueryRunnerTestHelper.allGran)
               .intervals(QueryRunnerTestHelper.fullOnInterval)
               .dimensions(
                   new DefaultDimensionSpec(
                       QueryRunnerTestHelper.qualityDimension,
                       QueryRunnerTestHelper.qualityDimension
                   )
               )
               .query("a")
               .build();
}
@Test
public void testSerDe() throws IOException
{
  // A query built with an explicit LegacyDimensionSpec should equal queries
  // deserialized from JSON where searchDimensions is a list of bare strings
  // (the legacy wire format).
  Query query = Druids.newSearchQueryBuilder()
      .dataSource(QueryRunnerTestHelper.dataSource)
      .granularity(QueryRunnerTestHelper.allGran)
      .intervals(QueryRunnerTestHelper.fullOnInterval)
      .dimensions(new LegacyDimensionSpec(QueryRunnerTestHelper.qualityDimension))
      .query("a")
      .build();

  // JSON with the dimension name interpolated from the test helper constant.
  final String json =
      "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},\"searchDimensions\":[\""
      + QueryRunnerTestHelper.qualityDimension
      + "\"],\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}";
  final Query serdeQuery = jsonMapper.readValue(json, Query.class);
  Assert.assertEquals(query.toString(), serdeQuery.toString());
  Assert.assertEquals(query, serdeQuery);

  // Same check with the dimension name ("quality") spelled out literally.
  final String json2 =
      "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},\"searchDimensions\":[\"quality\"],\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}";
  final Query serdeQuery2 = jsonMapper.readValue(json2, Query.class);
  Assert.assertEquals(query.toString(), serdeQuery2.toString());
  Assert.assertEquals(query, serdeQuery2);
}
}