Mirror of https://github.com/apache/druid.git
Commit 9b54124cd0 (parent c57c07f28a): pseudo integration tests for approximate histogram.
@ -0,0 +1,119 @@
|
|||
/*
|
||||
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Metamarkets licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package io.druid.query.aggregation.histogram;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import com.metamx.common.guava.Sequence;
|
||||
import com.metamx.common.guava.Sequences;
|
||||
import io.druid.data.input.MapBasedRow;
|
||||
import io.druid.granularity.QueryGranularity;
|
||||
import io.druid.query.aggregation.AggregationTestHelper;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class ApproximateHistogramAggregationTest
|
||||
{
|
||||
private AggregationTestHelper helper;
|
||||
|
||||
public ApproximateHistogramAggregationTest()
|
||||
{
|
||||
ApproximateHistogramDruidModule module = new ApproximateHistogramDruidModule();
|
||||
module.configure(null);
|
||||
helper = new AggregationTestHelper(Lists.newArrayList(module.getJacksonModules()));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIngestWithNullsIgnoredAndQuery() throws Exception
|
||||
{
|
||||
MapBasedRow row = ingestAndQuery(true);
|
||||
Assert.assertEquals(92.782760, row.getFloatMetric("index_min"), 0.0001);
|
||||
Assert.assertEquals(135.109191, row.getFloatMetric("index_max"), 0.0001);
|
||||
Assert.assertEquals(133.69340, row.getFloatMetric("index_quantile"), 0.0001);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIngestWithNullsToZeroAndQuery() throws Exception
|
||||
{
|
||||
MapBasedRow row = ingestAndQuery(false);
|
||||
Assert.assertEquals(0.0, row.getFloatMetric("index_min"), 0.0001);
|
||||
Assert.assertEquals(135.109191, row.getFloatMetric("index_max"), 0.0001);
|
||||
Assert.assertEquals(131.428176, row.getFloatMetric("index_quantile"), 0.0001);
|
||||
}
|
||||
|
||||
private MapBasedRow ingestAndQuery(boolean ignoreNulls) throws Exception
|
||||
{
|
||||
String ingestionAgg = ignoreNulls ? "approxHistogramFold" : "approxHistogram";
|
||||
|
||||
String metricSpec = "[{"
|
||||
+ "\"type\": \"" + ingestionAgg + "\","
|
||||
+ "\"name\": \"index_ah\","
|
||||
+ "\"fieldName\": \"index\""
|
||||
+ "}]";
|
||||
|
||||
String parseSpec = "{"
|
||||
+ "\"type\" : \"string\","
|
||||
+ "\"parseSpec\" : {"
|
||||
+ " \"format\" : \"tsv\","
|
||||
+ " \"timestampSpec\" : {"
|
||||
+ " \"column\" : \"timestamp\","
|
||||
+ " \"format\" : \"auto\""
|
||||
+ "},"
|
||||
+ " \"dimensionsSpec\" : {"
|
||||
+ " \"dimensions\": [],"
|
||||
+ " \"dimensionExclusions\" : [],"
|
||||
+ " \"spatialDimensions\" : []"
|
||||
+ " },"
|
||||
+ " \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
|
||||
+ " }"
|
||||
+ "}";
|
||||
|
||||
String query = "{"
|
||||
+ "\"queryType\": \"groupBy\","
|
||||
+ "\"dataSource\": \"test_datasource\","
|
||||
+ "\"granularity\": \"ALL\","
|
||||
+ "\"dimensions\": [],"
|
||||
+ "\"aggregations\": ["
|
||||
+ " { \"type\": \"approxHistogramFold\", \"name\": \"index_ah\", \"fieldName\": \"index_ah\" }"
|
||||
+ "],"
|
||||
+ "\"postAggregations\": ["
|
||||
+ " { \"type\": \"min\", \"name\": \"index_min\", \"fieldName\": \"index_ah\"},"
|
||||
+ " { \"type\": \"max\", \"name\": \"index_max\", \"fieldName\": \"index_ah\"},"
|
||||
+ " { \"type\": \"quantile\", \"name\": \"index_quantile\", \"fieldName\": \"index_ah\", \"probability\" : 0.99 }"
|
||||
+ "],"
|
||||
+ "\"intervals\": [ \"1970/2050\" ]"
|
||||
+ "}";
|
||||
|
||||
Sequence seq = helper.createIndexAndRunQueryOnSegment(
|
||||
this.getClass().getClassLoader().getResourceAsStream("sample.data.tsv"),
|
||||
parseSpec,
|
||||
metricSpec,
|
||||
0,
|
||||
QueryGranularity.NONE,
|
||||
50000,
|
||||
query
|
||||
);
|
||||
|
||||
return (MapBasedRow) Sequences.toList(seq, Lists.newArrayList()).get(0);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,13 @@
|
|||
2011-04-15T00:00:00.000Z spot automotive preferred apreferred 106.793700
|
||||
2011-04-15T00:00:00.000Z spot business preferred bpreferred 94.469747
|
||||
2011-04-15T00:00:00.000Z spot entertainment preferred epreferred 135.109191
|
||||
2011-04-15T00:00:00.000Z spot health preferred hpreferred 99.596909
|
||||
2011-04-15T00:00:00.000Z spot mezzanine preferred mpreferred 92.782760
|
||||
2011-04-15T00:00:00.000Z spot news preferred npreferred
|
||||
2011-04-15T00:00:00.000Z spot premium preferred ppreferred
|
||||
2011-04-15T00:00:00.000Z spot technology preferred tpreferred
|
||||
2011-04-15T00:00:00.000Z spot travel preferred tpreferred
|
||||
2011-04-15T00:00:00.000Z total_market mezzanine preferred mpreferred
|
||||
2011-04-15T00:00:00.000Z total_market premium preferred ppreferred
|
||||
2011-04-15T00:00:00.000Z upfront mezzanine preferred mpreferred
|
||||
2011-04-15T00:00:00.000Z upfront premium preferred ppreferred
|
This file cannot be rendered as a table because rows from line 6 onward intentionally omit the trailing "index" field (they exercise the null-handling paths of the test).
|
Loading…
Reference in New Issue