Rollup add default metrics to histo groups (#34534)

* Rollup: Adding default metrics for histo group timefield (#34379)

* Rollup: Adding default histo metrics and tests

* fixing failing client side test with new default values

* Adding HLRC docs for default values

* Addressing PR comments

* Removing value_count default agg

* Updating docs for rollups

* Minor interval change
This commit is contained in:
Benjamin Trent 2018-10-19 07:23:25 -05:00 committed by GitHub
parent 3036ab1048
commit 4236358f5d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 403 additions and 18 deletions

View File

@ -42,6 +42,11 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
/**
* This class holds the configuration details of a rollup job, such as the groupings, metrics, what
* index to rollup and where to roll them to.
*
* When the configuration is stored server side, if there is no {@link MetricConfig} for the fields referenced in the
* {@link HistogramGroupConfig} and {@link DateHistogramGroupConfig} in the passed {@link GroupConfig},
* then default metrics of {@code ["min", "max"]} are provided
*
*/
public class RollupJobConfig implements Validatable, ToXContentObject {

View File

@ -46,6 +46,7 @@ import org.elasticsearch.client.rollup.job.config.GroupConfig;
import org.elasticsearch.client.rollup.job.config.MetricConfig;
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@ -168,7 +169,9 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
public void testPutAndGetRollupJob() throws Exception {
// TODO expand this to also test with histogram and terms?
final GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("date", DateHistogramInterval.DAY));
final List<MetricConfig> metrics = Collections.singletonList(new MetricConfig("value", SUPPORTED_METRICS));
final List<MetricConfig> metrics = Arrays.asList(
new MetricConfig("value", SUPPORTED_METRICS),
new MetricConfig("date", Arrays.asList(MaxAggregationBuilder.NAME)));
final TimeValue timeout = TimeValue.timeValueSeconds(randomIntBetween(30, 600));
PutRollupJobRequest putRollupJobRequest =
@ -196,6 +199,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
assertEquals(groups.getDateHistogram().getTimeZone(), source.get("date.date_histogram.time_zone"));
for (MetricConfig metric : metrics) {
if (metric.getField().equals("value")) {
for (String name : metric.getMetrics()) {
Number value = (Number) source.get(metric.getField() + "." + name + ".value");
if ("min".equals(name)) {
@ -212,6 +216,12 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
assertEquals(numDocs, value.intValue());
}
}
} else {
Number value = (Number) source.get(metric.getField() + ".max.value");
assertEquals(
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime("2018-01-01T00:59:50").getMillis(),
value.longValue());
}
}
});

View File

@ -119,6 +119,68 @@ include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-put-rollup-
<2> Adds the metrics to compute on the `temperature` field
<3> Adds the metrics to compute on the `voltage` field
By default, metrics `min`/`max` for the fields in `DateHistogramGroupConfig` and
`HistogramGroupConfig` are added to the configuration unless the user already provided
metrics for those fields.
So, for the following configuration:
[source,js]
--------------------------------------------------
"groups" : {
"date_histogram": {
"field": "timestamp",
"interval": "1h",
"delay": "7d",
"time_zone": "UTC"
},
"terms": {
"fields": ["hostname", "datacenter"]
},
"histogram": {
"fields": ["load", "net_in", "net_out"],
"interval": 5
}
},
"metrics": [
{
"field": "load",
"metrics": ["max"]
},
{
"field": "net_in",
"metrics": ["max"]
}
]
--------------------------------------------------
// NOTCONSOLE
The following will be the metrics in the configuration after
the defaults are added server side. Note the default metrics
provided for the fields `timestamp` and `net_out`.
[source,js]
--------------------------------------------------
"metrics": [
{
"field": "load",
"metrics": ["max"]
},
{
"field": "net_in",
"metrics": ["max"]
},
{
"field": "timestamp",
"metrics": ["min", "max"]
},
{
"field": "net_out",
"metrics": ["min", "max"]
}
]
--------------------------------------------------
// NOTCONSOLE
[[java-rest-high-x-pack-rollup-put-rollup-job-execution]]
==== Execution

View File

@ -88,6 +88,13 @@ Which will yield the following response:
"metrics" : [
"avg"
]
},
{
"field": "timestamp",
"metrics": [
"max",
"min"
]
}
],
"timeout" : "20s",
@ -208,6 +215,13 @@ Which will yield the following response:
"metrics" : [
"avg"
]
},
{
"field": "timestamp",
"metrics": [
"min",
"max"
]
}
],
"timeout" : "20s",
@ -257,6 +271,13 @@ Which will yield the following response:
"metrics" : [
"avg"
]
},
{
"field": "timestamp",
"metrics": [
"min",
"max"
]
}
],
"timeout" : "20s",

View File

@ -68,7 +68,7 @@ PUT _xpack/rollup/job/sensor
"groups" : {
"date_histogram": {
"field": "timestamp",
"interval": "1h",
"interval": "60m",
"delay": "7d"
},
"terms": {
@ -99,3 +99,83 @@ When the job is created, you receive the following results:
}
----
// TESTRESPONSE
By default the metrics `min`/`max` are added
for the fields in the `date_histogram` and `histogram` configurations.
If this behavior is not desired, explicitly configure metrics
for those fields. This will override the defaults.
If the following is provided:
[source,js]
--------------------------------------------------
PUT _xpack/rollup/job/sensor2
{
"index_pattern": "sensor-*",
"rollup_index": "sensor_rollup",
"cron": "*/30 * * * * ?",
"page_size" :1000,
"groups" : {
"date_histogram": {
"field": "timestamp",
"interval": "60m",
"delay": "7d"
},
"histogram": {
"fields": ["voltage", "temperature"],
"interval": 5
}
},
"metrics": [
{
"field": "temperature",
"metrics": ["min", "max", "sum"]
}
]
}
--------------------------------------------------
// NOTCONSOLE
// TEST[setup:sensor_index]
The actual config when created in the cluster will look as follows.
[source,js]
--------------------------------------------------
{
"index_pattern": "sensor-*",
"rollup_index": "sensor_rollup",
"cron": "*/30 * * * * ?",
"page_size" :1000,
"groups" : {
"date_histogram": {
"field": "timestamp",
"interval": "60m",
"delay": "7d"
},
"histogram": {
"fields": ["voltage", "temperature"],
"interval": 5
}
},
"metrics": [
{
"field": "temperature",
"metrics": ["min", "max", "sum"]
},
{
"field": "voltage", <1>
"metrics": ["min", "max"]
},
{
"field": "timestamp", <1>
"metrics": ["min", "max"]
}
]
}
--------------------------------------------------
// NOTCONSOLE
<1> Notice the new default metrics gathered for `voltage` and `timestamp`.
Since these fields were referenced in `groups.histogram` and
`groups.date_histogram` configurations
respectively and no metrics were requested for them,
they both got the default metrics of `["min", "max"]`.

View File

@ -124,6 +124,12 @@ Which will yield the following response:
"time_zone" : "UTC",
"interval" : "1h",
"delay": "7d"
},
{
"agg": "max"
},
{
"agg": "min"
}
],
"voltage" : [

View File

@ -120,6 +120,12 @@ This will yield the following response:
"time_zone" : "UTC",
"interval" : "1h",
"delay": "7d"
},
{
"agg" : "max"
},
{
"agg" : "min"
}
],
"voltage" : [

View File

@ -20,14 +20,19 @@ import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
@ -46,6 +51,7 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject {
private static final String PAGE_SIZE = "page_size";
private static final String INDEX_PATTERN = "index_pattern";
private static final String ROLLUP_INDEX = "rollup_index";
private static final List<String> DEFAULT_HISTO_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME);
private final String id;
private final String indexPattern;
@ -123,7 +129,7 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject {
this.indexPattern = indexPattern;
this.rollupIndex = rollupIndex;
this.groupConfig = groupConfig;
this.metricsConfig = metricsConfig != null ? metricsConfig : Collections.emptyList();
this.metricsConfig = addDefaultMetricsIfNeeded(metricsConfig, groupConfig);
this.timeout = timeout != null ? timeout : DEFAULT_TIMEOUT;
this.cron = cron;
this.pageSize = pageSize;
@ -277,4 +283,23 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject {
public static RollupJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) throws IOException {
return PARSER.parse(parser, optionalJobId);
}
/**
 * Adds the default {@code ["max", "min"]} metrics for the date histogram time field and for
 * every histogram group field that the user did not already configure metrics for.
 *
 * @param metrics     the user supplied metric configurations, may be {@code null}
 * @param groupConfig the grouping configuration whose fields receive default metrics, may be {@code null}
 * @return an unmodifiable list containing the input metrics plus any added defaults
 */
private static List<MetricConfig> addDefaultMetricsIfNeeded(List<MetricConfig> metrics, GroupConfig groupConfig) {
    List<MetricConfig> inputMetrics = metrics != null ? new ArrayList<>(metrics) : new ArrayList<>();
    if (groupConfig != null) {
        Set<String> currentFields = inputMetrics.stream().map(MetricConfig::getField).collect(Collectors.toSet());
        String timeField = groupConfig.getDateHistogram().getField();
        // Set#add returns false when the field was already present, so explicitly
        // configured fields keep their user supplied metrics untouched
        if (currentFields.add(timeField)) {
            inputMetrics.add(new MetricConfig(timeField, DEFAULT_HISTO_METRICS));
        }
        if (groupConfig.getHistogram() != null) {
            for (String histoField : groupConfig.getHistogram().getFields()) {
                // track fields as defaults are added so a field shared between the
                // date_histogram and histogram groups does not get two default entries
                if (currentFields.add(histoField)) {
                    inputMetrics.add(new MetricConfig(histoField, DEFAULT_HISTO_METRICS));
                }
            }
        }
    }
    return Collections.unmodifiableList(inputMetrics);
}
}

View File

@ -11,10 +11,23 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.stream.Collectors;
import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween;
import static java.util.Collections.emptyList;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomCron;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomMetricsConfigs;
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomRollupJobConfig;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.isIn;
public class RollupJobConfigTests extends AbstractSerializingTestCase<RollupJobConfig> {
@ -163,4 +176,69 @@ public class RollupJobConfigTests extends AbstractSerializingTestCase<RollupJobC
null, emptyList(), sample.getTimeout()));
assertThat(e.getMessage(), equalTo("At least one grouping or metric must be configured"));
}
public void testDefaultFieldsForDateHistograms() {
    final Random random = random();
    DateHistogramGroupConfig dateHisto = randomDateHistogramGroupConfig(random);
    HistogramGroupConfig histoGroup = randomHistogramGroupConfig(random);
    // Give every histogram field an explicit "max" metric so that only the date
    // histogram time field is eligible for the server-side defaults.
    List<MetricConfig> metrics = new ArrayList<>(randomMetricsConfigs(random));
    for (String field : histoGroup.getFields()) {
        metrics.add(new MetricConfig(field, Arrays.asList("max")));
    }
    GroupConfig groups = new GroupConfig(dateHisto, histoGroup, null);
    RollupJobConfig config = new RollupJobConfig(
        randomAsciiAlphanumOfLengthBetween(random, 1, 20),
        "indexes_*",
        "rollup_" + randomAsciiAlphanumOfLengthBetween(random, 1, 20),
        randomCron(),
        randomIntBetween(1, 10),
        groups,
        metrics,
        null);
    Set<String> fieldsWithMetrics = config.getMetricsConfig().stream().map(MetricConfig::getField).collect(Collectors.toSet());
    // The date histogram time field must always end up with metrics configured
    assertThat(dateHisto.getField(), isIn(fieldsWithMetrics));
    List<String> histoFields = Arrays.asList(histoGroup.getFields());
    config.getMetricsConfig().forEach(metricConfig -> {
        if (histoFields.contains(metricConfig.getField())) {
            // Explicitly configured above, so the defaults must not have been added
            assertThat(metricConfig.getMetrics(), containsInAnyOrder("max"));
        }
        if (metricConfig.getField().equals(dateHisto.getField())) {
            // No user supplied metrics, so the defaults of min/max were added
            assertThat(metricConfig.getMetrics(), containsInAnyOrder("max", "min"));
        }
    });
}
public void testDefaultFieldsForHistograms() {
    final Random random = random();
    DateHistogramGroupConfig dateHisto = randomDateHistogramGroupConfig(random);
    HistogramGroupConfig histoGroup = randomHistogramGroupConfig(random);
    // Give the date histogram time field an explicit "max" metric so that only the
    // histogram group fields are eligible for the server-side defaults.
    List<MetricConfig> metrics = new ArrayList<>(randomMetricsConfigs(random));
    metrics.add(new MetricConfig(dateHisto.getField(), Arrays.asList("max")));
    GroupConfig groups = new GroupConfig(dateHisto, histoGroup, null);
    RollupJobConfig config = new RollupJobConfig(
        randomAsciiAlphanumOfLengthBetween(random, 1, 20),
        "indexes_*",
        "rollup_" + randomAsciiAlphanumOfLengthBetween(random, 1, 20),
        randomCron(),
        randomIntBetween(1, 10),
        groups,
        metrics,
        null);
    Set<String> fieldsWithMetrics = config.getMetricsConfig().stream().map(MetricConfig::getField).collect(Collectors.toSet());
    // Every histogram group field must end up with metrics configured
    for (String field : histoGroup.getFields()) {
        assertThat(field, isIn(fieldsWithMetrics));
    }
    assertThat(dateHisto.getField(), isIn(fieldsWithMetrics));
    List<String> histoFields = Arrays.asList(histoGroup.getFields());
    config.getMetricsConfig().forEach(metricConfig -> {
        if (histoFields.contains(metricConfig.getField())) {
            // No user supplied metrics, so the defaults of min/max were added
            assertThat(metricConfig.getMetrics(), containsInAnyOrder("max", "min"));
        }
        if (metricConfig.getField().equals(dateHisto.getField())) {
            // Explicitly configured above, so the defaults must not have been added
            assertThat(metricConfig.getMetrics(), containsInAnyOrder("max"));
        }
    });
}
}

View File

@ -116,6 +116,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1ms",
"the_histo.date_histogram._count", 2,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", 3.0,
"the_histo.max.value", 3.0,
"_rollup.id", job.getId()
)
));
@ -129,6 +131,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1ms",
"the_histo.date_histogram._count", 1,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", 7.0,
"the_histo.max.value", 7.0,
"_rollup.id", job.getId()
)
));
@ -179,6 +183,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"counter.max.value", 20.0,
"counter.sum.value", 50.0,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) asLong("2015-03-31T03:00:00"),
"the_histo.max.value", (double) asLong("2015-03-31T03:40:00"),
"_rollup.id", job.getId()
)
));
@ -197,6 +203,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"counter.max.value", 55.0,
"counter.sum.value", 141.0,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) asLong("2015-03-31T04:00:00"),
"the_histo.max.value", (double) asLong("2015-03-31T04:40:00"),
"_rollup.id", job.getId()
)
));
@ -215,6 +223,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"counter.max.value", 80.0,
"counter.sum.value", 275.0,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) asLong("2015-03-31T05:00:00"),
"the_histo.max.value", (double) asLong("2015-03-31T05:40:00"),
"_rollup.id", job.getId()
)
));
@ -233,6 +243,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"counter.max.value", 100.0,
"counter.sum.value", 270.0,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) asLong("2015-03-31T06:00:00"),
"the_histo.max.value", (double) asLong("2015-03-31T06:40:00"),
"_rollup.id", job.getId()
)
));
@ -251,6 +263,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"counter.max.value", 200.0,
"counter.sum.value", 440.0,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) asLong("2015-03-31T07:00:00"),
"the_histo.max.value", (double) asLong("2015-03-31T07:40:00"),
"_rollup.id", job.getId()
)
));
@ -292,6 +306,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1m",
"the_histo.date_histogram._count", 2,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) (now - TimeValue.timeValueHours(5).getMillis()),
"the_histo.max.value", (double) (now - TimeValue.timeValueHours(5).getMillis()),
"_rollup.id", job.getId()
)
));
@ -305,6 +321,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1m",
"the_histo.date_histogram._count", 2,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) (now - TimeValue.timeValueMinutes(75).getMillis()),
"the_histo.max.value", (double) (now - TimeValue.timeValueMinutes(75).getMillis()),
"_rollup.id", job.getId()
)
));
@ -318,6 +336,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1m",
"the_histo.date_histogram._count", 1,
"the_histo.date_histogram.time_zone", DateTimeZone.UTC.toString(),
"the_histo.min.value", (double) (now - TimeValue.timeValueMinutes(61).getMillis()),
"the_histo.max.value", (double) (now - TimeValue.timeValueMinutes(61).getMillis()),
"_rollup.id", job.getId()
)
));
@ -357,6 +377,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1d",
"the_histo.date_histogram._count", 2,
"the_histo.date_histogram.time_zone", timeZone.toString(),
"the_histo.min.value", (double) (now - TimeValue.timeValueHours(10).getMillis()),
"the_histo.max.value", (double) (now - TimeValue.timeValueHours(8).getMillis()),
"_rollup.id", job.getId()
)
));
@ -376,6 +398,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1d",
"the_histo.date_histogram._count", 2,
"the_histo.date_histogram.time_zone", timeZone.toString(),
"the_histo.min.value", (double) (now - TimeValue.timeValueHours(10).getMillis()),
"the_histo.max.value", (double) (now - TimeValue.timeValueHours(8).getMillis()),
"_rollup.id", job.getId()
)
));
@ -389,6 +413,8 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
"the_histo.date_histogram.interval", "1d",
"the_histo.date_histogram._count", 5,
"the_histo.date_histogram.time_zone", timeZone.toString(),
"the_histo.min.value", (double) (now - TimeValue.timeValueHours(6).getMillis()),
"the_histo.max.value", (double) now,
"_rollup.id", job.getId()
)
));

View File

@ -62,6 +62,10 @@ setup:
- "min"
- "max"
- "sum"
- field: "the_field"
metrics:
- "max"
- "min"
timeout: "20s"
stats:
pages_processed: 0
@ -109,6 +113,10 @@ setup:
- "min"
- "max"
- "sum"
- field: "the_field"
metrics:
- "max"
- "min"
timeout: "20s"
stats:
pages_processed: 0
@ -156,6 +164,10 @@ setup:
- "min"
- "max"
- "sum"
- field: "the_field"
metrics:
- "max"
- "min"
timeout: "20s"
stats:
pages_processed: 0

View File

@ -63,6 +63,10 @@ setup:
- "min"
- "max"
- "sum"
- field: "the_field"
metrics:
- "max"
- "min"
timeout: "20s"
stats:
pages_processed: 0
@ -174,6 +178,10 @@ setup:
- "min"
- "max"
- "sum"
- field: "the_field"
metrics:
- "max"
- "min"
timeout: "20s"
stats:
pages_processed: 0
@ -200,6 +208,10 @@ setup:
- "min"
- "max"
- "sum"
- field: "the_field"
metrics:
- "max"
- "min"
timeout: "20s"
stats:
pages_processed: 0

View File

@ -77,6 +77,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -124,6 +126,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -136,6 +140,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -209,6 +215,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -221,6 +229,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -236,6 +246,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"

View File

@ -77,6 +77,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -124,6 +126,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -136,6 +140,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -184,6 +190,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -257,6 +265,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -269,6 +279,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -283,6 +295,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -360,6 +374,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -372,6 +388,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -386,6 +404,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"
@ -459,6 +479,8 @@ setup:
- agg: "date_histogram"
interval: "1h"
time_zone: "UTC"
- agg: "max"
- agg: "min"
value_field:
- agg: "min"
- agg: "max"

View File

@ -63,6 +63,10 @@ setup:
- "min"
- "max"
- "sum"
- field: "the_field"
metrics:
- "max"
- "min"
timeout: "20s"
stats:
pages_processed: 0

View File

@ -173,6 +173,8 @@ teardown:
hits.hits.0._source:
timestamp.date_histogram.time_zone: "UTC"
timestamp.date_histogram.timestamp: 0
timestamp.max.value: 123.0
timestamp.min.value: 123.0
value_field.max.value: 1232.0
_rollup.version: 2
timestamp.date_histogram.interval: "1s"
@ -334,6 +336,8 @@ teardown:
hits.hits.0._source:
timestamp.date_histogram.time_zone: "UTC"
timestamp.date_histogram.timestamp: 0
timestamp.max.value: 123.0
timestamp.min.value: 123.0
value_field.max.value: 1232.0
_rollup.version: 2
timestamp.date_histogram.interval: "1s"