mirror of https://github.com/apache/druid.git
Add context dimension to DefaultQueryMetrics (#10578)
* Add context dimension to DefaultQueryMetrics
* Remove redundant addition of context dimension from DruidMetrics, now that QueryMetrics adds it by default
* Update SearchQueryMetrics to follow the same pattern as other default dimensions in QueryMetrics
* Add PublicApi annotation for context() in the QueryMetrics interface
This commit is contained in:
parent
e7e07eab11
commit
2e02eebd9d
|
@ -89,6 +89,7 @@ public class DefaultQueryMetrics<QueryType extends Query<?>> implements QueryMet
|
|||
queryId(query);
|
||||
subQueryId(query);
|
||||
sqlQueryId(query);
|
||||
context(query);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -69,7 +69,6 @@ public class DruidMetrics
|
|||
} else {
|
||||
queryMetrics = queryMetricsFactory.makeMetrics(query);
|
||||
}
|
||||
queryMetrics.context(query);
|
||||
queryMetrics.remoteAddress(remoteAddr);
|
||||
return queryMetrics;
|
||||
}
|
||||
|
|
|
@ -123,8 +123,8 @@ import java.util.List;
|
|||
* implement extra methods from SegmentMetadataQueryMetrics interfaces with empty bodies, AND DELEGATE ALL OTHER
|
||||
* METHODS TO A QueryMetrics OBJECT, provided as a sole parameter in DefaultSegmentMetadataQueryMetrics constructor.
|
||||
*
|
||||
* NOTE: query(), dataSource(), queryType(), interval(), hasFilters(), duration(), queryId() and sqlQueryId() methods
|
||||
 * or any "pre-query-execution-time" methods should either have an empty body or throw an exception.
|
||||
* NOTE: query(), dataSource(), queryType(), interval(), hasFilters(), duration(), queryId(), sqlQueryId(), and
|
||||
 * context() methods or any "pre-query-execution-time" methods should either have an empty body or throw an exception.
|
||||
*
|
||||
* 3. Create `interface SegmentMetadataQueryMetricsFactory` with a single method
|
||||
* `SegmentMetadataQueryMetrics makeMetrics(SegmentMetadataQuery query);`.
|
||||
|
@ -217,6 +217,7 @@ public interface QueryMetrics<QueryType extends Query<?>>
|
|||
/**
|
||||
* Sets {@link Query#getContext()} of the given query as dimension.
|
||||
*/
|
||||
@PublicApi
|
||||
void context(QueryType query);
|
||||
|
||||
void server(String host);
|
||||
|
|
|
@ -108,7 +108,7 @@ public class DefaultSearchQueryMetrics implements SearchQueryMetrics
|
|||
@Override
|
||||
public void context(SearchQuery query)
|
||||
{
|
||||
delegateQueryMetrics.context(query);
|
||||
throw new ISE("Unsupported method in default query metrics implementation.");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.apache.druid.query;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import org.apache.druid.java.util.common.granularity.Granularities;
|
||||
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
|
||||
|
@ -62,11 +63,12 @@ public class DefaultQueryMetricsTest
|
|||
.aggregators(new CountAggregatorFactory("count"))
|
||||
.threshold(5)
|
||||
.filters(new SelectorDimFilter("tags", "t3", null))
|
||||
.context(ImmutableMap.of("testKey", "testValue"))
|
||||
.build();
|
||||
queryMetrics.query(query);
|
||||
queryMetrics.reportQueryTime(0).emit(serviceEmitter);
|
||||
Map<String, Object> actualEvent = cachingEmitter.getLastEmittedEvent().toMap();
|
||||
Assert.assertEquals(12, actualEvent.size());
|
||||
Assert.assertEquals(13, actualEvent.size());
|
||||
Assert.assertTrue(actualEvent.containsKey("feed"));
|
||||
Assert.assertTrue(actualEvent.containsKey("timestamp"));
|
||||
Assert.assertEquals("", actualEvent.get("host"));
|
||||
|
@ -82,6 +84,7 @@ public class DefaultQueryMetricsTest
|
|||
Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
|
||||
Assert.assertEquals("query/time", actualEvent.get("metric"));
|
||||
Assert.assertEquals(0L, actualEvent.get("value"));
|
||||
Assert.assertEquals(ImmutableMap.of("testKey", "testValue"), actualEvent.get("context"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -75,7 +75,7 @@ public class DefaultGroupByQueryMetricsTest
|
|||
|
||||
queryMetrics.reportQueryTime(0).emit(serviceEmitter);
|
||||
Map<String, Object> actualEvent = cachingEmitter.getLastEmittedEvent().toMap();
|
||||
Assert.assertEquals(15, actualEvent.size());
|
||||
Assert.assertEquals(16, actualEvent.size());
|
||||
Assert.assertTrue(actualEvent.containsKey("feed"));
|
||||
Assert.assertTrue(actualEvent.containsKey("timestamp"));
|
||||
Assert.assertEquals("", actualEvent.get("host"));
|
||||
|
@ -87,6 +87,7 @@ public class DefaultGroupByQueryMetricsTest
|
|||
Assert.assertEquals("true", actualEvent.get("hasFilters"));
|
||||
Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration"));
|
||||
Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
|
||||
Assert.assertEquals(ImmutableMap.of("bySegment", true), actualEvent.get("context"));
|
||||
|
||||
// GroupBy-specific dimensions
|
||||
Assert.assertEquals("1", actualEvent.get("numDimensions"));
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.apache.druid.query.search;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
|
||||
import org.apache.druid.query.CachingEmitter;
|
||||
|
@ -57,6 +58,7 @@ public class DefaultSearchQueryMetricsTest
|
|||
ImmutableSet.of("t3"),
|
||||
null
|
||||
))
|
||||
.context(ImmutableMap.of("testKey", "testValue"))
|
||||
.build();
|
||||
|
||||
SearchQueryMetrics queryMetrics = DefaultSearchQueryMetricsFactory.instance().makeMetrics(query);
|
||||
|
@ -65,7 +67,7 @@ public class DefaultSearchQueryMetricsTest
|
|||
|
||||
queryMetrics.reportQueryTime(0).emit(serviceEmitter);
|
||||
Map<String, Object> actualEvent = cachingEmitter.getLastEmittedEvent().toMap();
|
||||
Assert.assertEquals(12, actualEvent.size());
|
||||
Assert.assertEquals(13, actualEvent.size());
|
||||
Assert.assertTrue(actualEvent.containsKey("feed"));
|
||||
Assert.assertTrue(actualEvent.containsKey("timestamp"));
|
||||
Assert.assertEquals("", actualEvent.get("host"));
|
||||
|
@ -79,6 +81,7 @@ public class DefaultSearchQueryMetricsTest
|
|||
Assert.assertEquals("false", actualEvent.get("hasFilters"));
|
||||
Assert.assertEquals(expectedIntervals.get(0).toDuration().toString(), actualEvent.get("duration"));
|
||||
Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
|
||||
Assert.assertEquals(ImmutableMap.of("testKey", "testValue"), actualEvent.get("context"));
|
||||
|
||||
// Metric
|
||||
Assert.assertEquals("query/time", actualEvent.get("metric"));
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.apache.druid.query.timeseries;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
|
||||
import org.apache.druid.query.CachingEmitter;
|
||||
import org.apache.druid.query.DefaultQueryMetricsTest;
|
||||
|
@ -54,12 +55,13 @@ public class DefaultTimeseriesQueryMetricsTest
|
|||
.aggregators(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM)
|
||||
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
|
||||
.descending(true)
|
||||
.context(ImmutableMap.of("testKey", "testValue"))
|
||||
.build();
|
||||
queryMetrics.query(query);
|
||||
|
||||
queryMetrics.reportQueryTime(0).emit(serviceEmitter);
|
||||
Map<String, Object> actualEvent = cachingEmitter.getLastEmittedEvent().toMap();
|
||||
Assert.assertEquals(15, actualEvent.size());
|
||||
Assert.assertEquals(16, actualEvent.size());
|
||||
Assert.assertTrue(actualEvent.containsKey("feed"));
|
||||
Assert.assertTrue(actualEvent.containsKey("timestamp"));
|
||||
Assert.assertEquals("", actualEvent.get("host"));
|
||||
|
@ -73,6 +75,7 @@ public class DefaultTimeseriesQueryMetricsTest
|
|||
Assert.assertEquals("false", actualEvent.get("hasFilters"));
|
||||
Assert.assertEquals(expectedIntervals.get(0).toDuration().toString(), actualEvent.get("duration"));
|
||||
Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
|
||||
Assert.assertEquals(ImmutableMap.of("testKey", "testValue"), actualEvent.get("context"));
|
||||
|
||||
// Timeseries-specific dimensions
|
||||
Assert.assertEquals("2", actualEvent.get("numMetrics"));
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.apache.druid.query.topn;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
import org.apache.druid.java.util.common.granularity.Granularities;
|
||||
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
|
||||
|
@ -64,12 +65,13 @@ public class DefaultTopNQueryMetricsTest
|
|||
.aggregators(new CountAggregatorFactory("count"))
|
||||
.threshold(5)
|
||||
.filters(new SelectorDimFilter("tags", "t3", null))
|
||||
.context(ImmutableMap.of("testKey", "testValue"))
|
||||
.build();
|
||||
queryMetrics.query(query);
|
||||
|
||||
queryMetrics.reportQueryTime(0).emit(serviceEmitter);
|
||||
Map<String, Object> actualEvent = cachingEmitter.getLastEmittedEvent().toMap();
|
||||
Assert.assertEquals(16, actualEvent.size());
|
||||
Assert.assertEquals(17, actualEvent.size());
|
||||
Assert.assertTrue(actualEvent.containsKey("feed"));
|
||||
Assert.assertTrue(actualEvent.containsKey("timestamp"));
|
||||
Assert.assertEquals("", actualEvent.get("host"));
|
||||
|
@ -83,6 +85,7 @@ public class DefaultTopNQueryMetricsTest
|
|||
Assert.assertEquals("true", actualEvent.get("hasFilters"));
|
||||
Assert.assertEquals(expectedIntervals.get(0).toDuration().toString(), actualEvent.get("duration"));
|
||||
Assert.assertEquals("", actualEvent.get(DruidMetrics.ID));
|
||||
Assert.assertEquals(ImmutableMap.of("testKey", "testValue"), actualEvent.get("context"));
|
||||
|
||||
// TopN-specific dimensions
|
||||
Assert.assertEquals("5", actualEvent.get("threshold"));
|
||||
|
|
[End of commit diff view — trailing page-UI text ("Loading… / Reference in New Issue") removed for clarity.]