Merge pull request #1818 from metamx/moreSerdeTestsForLookupExtraction

Add hashCode and equals to stock lookups
Xavier Léauté 2015-10-12 10:55:38 -07:00
commit f4e4dac434
5 changed files with 202 additions and 2 deletions
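The motivation for the new equals/hashCode overrides is that the serde tests below compare objects before and after a Jackson round trip; without value-based equality those assertions would compare references and fail. A minimal sketch of the idea follows (the class name LookupSerdeRoundTripExample and the direct ExtractionFn round trip are illustrative, not part of this commit; it assumes the Druid test classpath with DefaultObjectMapper, Guava, and JUnit):

// Illustrative sketch only, not part of this commit.
// With value-based equals()/hashCode() on MapLookupExtractor and LookupExtractionFn,
// a Jackson round trip can be verified with a single assertEquals.
package io.druid.query.extraction;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import io.druid.jackson.DefaultObjectMapper;
import org.junit.Assert;
import org.junit.Test;

import java.io.IOException;

public class LookupSerdeRoundTripExample
{
  private final ObjectMapper mapper = new DefaultObjectMapper();

  @Test
  public void testRoundTripEquality() throws IOException
  {
    final LookupExtractionFn fn = new LookupExtractionFn(
        new MapLookupExtractor(ImmutableMap.of("foo", "bar")),
        true,   // retain missing values
        null,   // no replacement value, matching the combination used in the tests below
        false   // not injective
    );
    final String json = mapper.writeValueAsString(fn);
    // Deserialize against the ExtractionFn base type; before this commit the assertion
    // below would have compared object identity and failed.
    Assert.assertEquals(fn, mapper.readValue(json, ExtractionFn.class));
  }
}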

@@ -108,4 +108,26 @@ public class LookupExtractionFn extends FunctionalExtraction
      throw Throwables.propagate(ex);
    }
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    LookupExtractionFn that = (LookupExtractionFn) o;

    return lookup.equals(that.lookup);
  }

  @Override
  public int hashCode()
  {
    return lookup.hashCode();
  }
}

@@ -84,4 +84,25 @@ public class MapLookupExtractor implements LookupExtractor
      throw Throwables.propagate(ex);
    }
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    MapLookupExtractor that = (MapLookupExtractor) o;

    return map.equals(that.map);
  }

  @Override
  public int hashCode()
  {
    return map.hashCode();
  }
}

@@ -0,0 +1,78 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.query.extraction;

import com.google.common.collect.ImmutableMap;
import org.junit.Assert;
import org.junit.Test;

import java.util.Arrays;
public class MapLookupExtractorTest
{
  private final MapLookupExtractor fn = new MapLookupExtractor(ImmutableMap.of("foo", "bar"));

  @Test
  public void testGetMap() throws Exception
  {
    Assert.assertEquals(ImmutableMap.of("foo", "bar"), fn.getMap());
  }

  @Test
  public void testApply() throws Exception
  {
    Assert.assertEquals("bar", fn.apply("foo"));
  }

  @Test
  public void testGetCacheKey() throws Exception
  {
    final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.of("foo", "bar"));
    Assert.assertArrayEquals(fn.getCacheKey(), fn2.getCacheKey());
    final MapLookupExtractor fn3 = new MapLookupExtractor(ImmutableMap.of("foo2", "bar"));
    Assert.assertFalse(Arrays.equals(fn.getCacheKey(), fn3.getCacheKey()));
    final MapLookupExtractor fn4 = new MapLookupExtractor(ImmutableMap.of("foo", "bar2"));
    Assert.assertFalse(Arrays.equals(fn.getCacheKey(), fn4.getCacheKey()));
  }

  @Test
  public void testEquals() throws Exception
  {
    final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.of("foo", "bar"));
    Assert.assertEquals(fn, fn2);
    final MapLookupExtractor fn3 = new MapLookupExtractor(ImmutableMap.of("foo2", "bar"));
    Assert.assertNotEquals(fn, fn3);
    final MapLookupExtractor fn4 = new MapLookupExtractor(ImmutableMap.of("foo", "bar2"));
    Assert.assertNotEquals(fn, fn4);
  }

  @Test
  public void testHashCode() throws Exception
  {
    final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.of("foo", "bar"));
    Assert.assertEquals(fn.hashCode(), fn2.hashCode());
    final MapLookupExtractor fn3 = new MapLookupExtractor(ImmutableMap.of("foo2", "bar"));
    Assert.assertNotEquals(fn.hashCode(), fn3.hashCode());
    final MapLookupExtractor fn4 = new MapLookupExtractor(ImmutableMap.of("foo", "bar2"));
    Assert.assertNotEquals(fn.hashCode(), fn4.hashCode());
  }
}

@@ -80,6 +80,39 @@ public class LookupExtractionFnTest
    this.injective = injective;
  }

  @Test
  public void testEqualsAndHash(){
    if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) {
      // skip
      return;
    }
    final LookupExtractionFn lookupExtractionFn1 = new LookupExtractionFn(
        new MapLookupExtractor(ImmutableMap.of("foo", "bar")),
        retainMissing,
        replaceMissing,
        injective
    );
    final LookupExtractionFn lookupExtractionFn2 = new LookupExtractionFn(
        new MapLookupExtractor(ImmutableMap.of("foo", "bar")),
        retainMissing,
        replaceMissing,
        injective
    );
    final LookupExtractionFn lookupExtractionFn3 = new LookupExtractionFn(
        new MapLookupExtractor(ImmutableMap.of("foo", "bar2")),
        retainMissing,
        replaceMissing,
        injective
    );

    Assert.assertEquals(lookupExtractionFn1, lookupExtractionFn2);
    Assert.assertEquals(lookupExtractionFn1.hashCode(), lookupExtractionFn2.hashCode());
    Assert.assertNotEquals(lookupExtractionFn1, lookupExtractionFn3);
    Assert.assertNotEquals(lookupExtractionFn1.hashCode(), lookupExtractionFn3.hashCode());
  }

  @Test
  public void testSimpleSerDe() throws IOException
  {

@@ -18,6 +18,7 @@
package io.druid.query.topn;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import io.druid.jackson.DefaultObjectMapper;
@@ -26,7 +27,10 @@ import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.ExtractionDimensionSpec;
import io.druid.query.dimension.LegacyDimensionSpec;
import io.druid.query.extraction.LookupExtractionFn;
import io.druid.query.extraction.MapLookupExtractor;
import org.junit.Assert;
import org.junit.Test;
@@ -76,8 +80,43 @@ public class TopNQueryTest
    Assert.assertEquals(query, serdeQuery);
  }

  @Test
  public void testQuerySerdeWithLookupExtractionFn() throws IOException
  {
    final TopNQuery expectedQuery = new TopNQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
        .dimension(
            new ExtractionDimensionSpec(
                marketDimension,
                marketDimension,
                new LookupExtractionFn(new MapLookupExtractor(ImmutableMap.of("foo", "bar")), true, null, false),
                null
            )
        )
        .metric(new NumericTopNMetricSpec(indexMetric))
        .threshold(2)
        .intervals(fullOnInterval.getIntervals())
        .aggregators(
            Lists.<AggregatorFactory>newArrayList(
                Iterables.concat(
                    commonAggregators,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index")
                    )
                )
            )
        )
        .build();

    final String str = jsonMapper.writeValueAsString(expectedQuery);
    Assert.assertEquals(expectedQuery, jsonMapper.readValue(str, TopNQuery.class));
  }

  @Test
  public void testQuerySerdeWithAlphaNumericTopNMetricSpec() throws IOException
  {
    TopNQuery expectedQuery = new TopNQueryBuilder()
        .dataSource(dataSource)
        .granularity(allGran)
@@ -106,7 +145,14 @@ public class TopNQueryTest
        + " \"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"\n"
        + " ]\n"
        + "}";
    TopNQuery actualQuery = jsonMapper.readValue(
        jsonMapper.writeValueAsString(
            jsonMapper.readValue(
                jsonQuery,
                TopNQuery.class
            )
        ), TopNQuery.class
    );
    Assert.assertEquals(expectedQuery, actualQuery);
  }