[ML-DataFrame] add support for fixed_interval, calendar_interval, remove interval (#42427)

* add support for fixed_interval, calendar_interval, remove interval

* adapt HLRC

* checkstyle

* add a hlrc to server test

* adapt yml test

* improve naming and doc

* improve interface and add test code for hlrc to server

* address review comments

* repair merge conflict

* fix date patterns

* address review comments

* remove assert for warning

* improve exception message

* use constants
This commit is contained in:
Hendrik Muhs 2019-05-24 19:54:06 +02:00
parent e28a9e99c4
commit 6d47ee9268
9 changed files with 476 additions and 163 deletions

View File

@@ -20,9 +20,9 @@
package org.elasticsearch.client.dataframe.transforms.pivot; package org.elasticsearch.client.dataframe.transforms.pivot;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@@ -31,7 +31,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter
import java.io.IOException; import java.io.IOException;
import java.time.ZoneId; import java.time.ZoneId;
import java.time.ZoneOffset; import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Objects; import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
@@ -43,32 +47,164 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
private static final ParseField TIME_ZONE = new ParseField("time_zone"); private static final ParseField TIME_ZONE = new ParseField("time_zone");
private static final ParseField FORMAT = new ParseField("format"); private static final ParseField FORMAT = new ParseField("format");
// From DateHistogramAggregationBuilder in core, transplanted and modified to a set
// so we don't need to import a dependency on the class
private static final Set<String> DATE_FIELD_UNITS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
"year",
"1y",
"quarter",
"1q",
"month",
"1M",
"week",
"1w",
"day",
"1d",
"hour",
"1h",
"minute",
"1m",
"second",
"1s")));
/**
* Interval can be specified in 2 ways:
*
* fixed_interval fixed intervals like 1h, 1m, 1d
* calendar_interval calendar aware intervals like 1M, 1Y, ...
*
* Note: data frames do not support the deprecated interval option
*/
public interface Interval extends ToXContentFragment {
String getName();
DateHistogramInterval getInterval();
}
public static class FixedInterval implements Interval {
private static final String NAME = "fixed_interval";
private final DateHistogramInterval interval;
public FixedInterval(DateHistogramInterval interval) {
this.interval = interval;
}
@Override
public String getName() {
return NAME;
}
@Override
public DateHistogramInterval getInterval() {
return interval;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME);
interval.toXContent(builder, params);
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final FixedInterval that = (FixedInterval) other;
return Objects.equals(this.interval, that.interval);
}
@Override
public int hashCode() {
return Objects.hash(interval);
}
}
public static class CalendarInterval implements Interval {
private static final String NAME = "calendar_interval";
private final DateHistogramInterval interval;
public CalendarInterval(DateHistogramInterval interval) {
this.interval = interval;
if (DATE_FIELD_UNITS.contains(interval.toString()) == false) {
throw new IllegalArgumentException("The supplied interval [" + interval + "] could not be parsed " +
"as a calendar interval.");
}
}
@Override
public String getName() {
return NAME;
}
@Override
public DateHistogramInterval getInterval() {
return interval;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME);
interval.toXContent(builder, params);
return builder;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final CalendarInterval that = (CalendarInterval) other;
return Objects.equals(this.interval, that.interval);
}
@Override
public int hashCode() {
return Objects.hash(interval);
}
}
private static final ConstructingObjectParser<DateHistogramGroupSource, Void> PARSER = private static final ConstructingObjectParser<DateHistogramGroupSource, Void> PARSER =
new ConstructingObjectParser<>("date_histogram_group_source", new ConstructingObjectParser<>("date_histogram_group_source",
true, true,
(args) -> { (args) -> {
String field = (String)args[0]; String field = (String)args[0];
long interval = 0; String fixedInterval = (String) args[1];
DateHistogramInterval dateHistogramInterval = null; String calendarInterval = (String) args[2];
if (args[1] instanceof Long) {
interval = (Long)args[1]; Interval interval = null;
if (fixedInterval != null && calendarInterval != null) {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found both");
} else if (fixedInterval != null) {
interval = new FixedInterval(new DateHistogramInterval(fixedInterval));
} else if (calendarInterval != null) {
interval = new CalendarInterval(new DateHistogramInterval(calendarInterval));
} else { } else {
dateHistogramInterval = (DateHistogramInterval) args[1]; throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found none");
} }
ZoneId zoneId = (ZoneId) args[2];
String format = (String) args[3]; ZoneId zoneId = (ZoneId) args[3];
return new DateHistogramGroupSource(field, interval, dateHistogramInterval, format, zoneId); String format = (String) args[4];
return new DateHistogramGroupSource(field, interval, format, zoneId);
}); });
static { static {
PARSER.declareString(optionalConstructorArg(), FIELD); PARSER.declareString(optionalConstructorArg(), FIELD);
PARSER.declareField(optionalConstructorArg(), p -> {
if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { PARSER.declareString(optionalConstructorArg(), new ParseField(FixedInterval.NAME));
return p.longValue(); PARSER.declareString(optionalConstructorArg(), new ParseField(CalendarInterval.NAME));
} else {
return new DateHistogramInterval(p.text());
}
}, HistogramGroupSource.INTERVAL, ObjectParser.ValueType.LONG);
PARSER.declareField(optionalConstructorArg(), p -> { PARSER.declareField(optionalConstructorArg(), p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) { if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
return ZoneId.of(p.text()); return ZoneId.of(p.text());
@@ -84,15 +220,13 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
return PARSER.apply(parser, null); return PARSER.apply(parser, null);
} }
private final long interval; private final Interval interval;
private final DateHistogramInterval dateHistogramInterval;
private final String format; private final String format;
private final ZoneId timeZone; private final ZoneId timeZone;
DateHistogramGroupSource(String field, long interval, DateHistogramInterval dateHistogramInterval, String format, ZoneId timeZone) { DateHistogramGroupSource(String field, Interval interval, String format, ZoneId timeZone) {
super(field); super(field);
this.interval = interval; this.interval = interval;
this.dateHistogramInterval = dateHistogramInterval;
this.format = format; this.format = format;
this.timeZone = timeZone; this.timeZone = timeZone;
} }
@@ -102,14 +236,10 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
return Type.DATE_HISTOGRAM; return Type.DATE_HISTOGRAM;
} }
public long getInterval() { public Interval getInterval() {
return interval; return interval;
} }
public DateHistogramInterval getDateHistogramInterval() {
return dateHistogramInterval;
}
public String getFormat() { public String getFormat() {
return format; return format;
} }
@@ -124,11 +254,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
if (field != null) { if (field != null) {
builder.field(FIELD.getPreferredName(), field); builder.field(FIELD.getPreferredName(), field);
} }
if (dateHistogramInterval == null) { interval.toXContent(builder, params);
builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), interval);
} else {
builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), dateHistogramInterval.toString());
}
if (timeZone != null) { if (timeZone != null) {
builder.field(TIME_ZONE.getPreferredName(), timeZone.toString()); builder.field(TIME_ZONE.getPreferredName(), timeZone.toString());
} }
@@ -152,15 +278,14 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
final DateHistogramGroupSource that = (DateHistogramGroupSource) other; final DateHistogramGroupSource that = (DateHistogramGroupSource) other;
return Objects.equals(this.field, that.field) && return Objects.equals(this.field, that.field) &&
Objects.equals(interval, that.interval) && Objects.equals(this.interval, that.interval) &&
Objects.equals(dateHistogramInterval, that.dateHistogramInterval) && Objects.equals(this.timeZone, that.timeZone) &&
Objects.equals(timeZone, that.timeZone) && Objects.equals(this.format, that.format);
Objects.equals(format, that.format);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(field, interval, dateHistogramInterval, timeZone, format); return Objects.hash(field, interval, timeZone, format);
} }
public static Builder builder() { public static Builder builder() {
@@ -170,8 +295,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
public static class Builder { public static class Builder {
private String field; private String field;
private long interval = 0; private Interval interval;
private DateHistogramInterval dateHistogramInterval;
private String format; private String format;
private ZoneId timeZone; private ZoneId timeZone;
@@ -187,41 +311,14 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
/** /**
* Set the interval for the DateHistogram grouping * Set the interval for the DateHistogram grouping
* @param interval the time interval in milliseconds * @param interval a fixed or calendar interval
* @return the {@link Builder} with the interval set. * @return the {@link Builder} with the interval set.
*/ */
public Builder setInterval(long interval) { public Builder setInterval(Interval interval) {
if (interval < 1) {
throw new IllegalArgumentException("[interval] must be greater than or equal to 1.");
}
this.interval = interval; this.interval = interval;
return this; return this;
} }
/**
* Set the interval for the DateHistogram grouping
* @param timeValue The time value to use as the interval
* @return the {@link Builder} with the interval set.
*/
public Builder setInterval(TimeValue timeValue) {
return setInterval(timeValue.getMillis());
}
/**
* Sets the interval of the DateHistogram grouping
*
* If this DateHistogramInterval is set, it supersedes the #{@link DateHistogramGroupSource#getInterval()}
* @param dateHistogramInterval the DateHistogramInterval to set
* @return The {@link Builder} with the dateHistogramInterval set.
*/
public Builder setDateHistgramInterval(DateHistogramInterval dateHistogramInterval) {
if (dateHistogramInterval == null) {
throw new IllegalArgumentException("[dateHistogramInterval] must not be null");
}
this.dateHistogramInterval = dateHistogramInterval;
return this;
}
/** /**
* Set the optional String formatting for the time interval. * Set the optional String formatting for the time interval.
* @param format The format of the output for the time interval key * @param format The format of the output for the time interval key
@@ -243,7 +340,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
} }
public DateHistogramGroupSource build() { public DateHistogramGroupSource build() {
return new DateHistogramGroupSource(field, interval, dateHistogramInterval, format, timeZone); return new DateHistogramGroupSource(field, interval, format, timeZone);
} }
} }
} }

View File

@@ -27,15 +27,20 @@ import java.io.IOException;
public class DateHistogramGroupSourceTests extends AbstractXContentTestCase<DateHistogramGroupSource> { public class DateHistogramGroupSourceTests extends AbstractXContentTestCase<DateHistogramGroupSource> {
public static DateHistogramGroupSource.Interval randomDateHistogramInterval() {
if (randomBoolean()) {
return new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval(randomPositiveTimeValue()));
} else {
return new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w")));
}
}
public static DateHistogramGroupSource randomDateHistogramGroupSource() { public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20); String field = randomAlphaOfLengthBetween(1, 20);
boolean setInterval = randomBoolean();
return new DateHistogramGroupSource(field, return new DateHistogramGroupSource(field,
setInterval ? randomLongBetween(1, 10_000) : 0, randomDateHistogramInterval(),
setInterval ? null : randomFrom(DateHistogramInterval.days(10), randomBoolean() ? randomAlphaOfLength(10) : null,
DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1)), randomBoolean() ? randomZone() : null);
randomBoolean() ? randomAlphaOfLength(10) : null,
randomBoolean() ? randomZone() : null);
} }
@Override @Override

View File

@@ -0,0 +1,79 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.dataframe.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.xpack.core.dataframe.transforms.pivot.DateHistogramGroupSource;
import static org.hamcrest.Matchers.equalTo;
public class DateHistogramGroupSourceTests extends AbstractResponseTestCase<
DateHistogramGroupSource,
org.elasticsearch.client.dataframe.transforms.pivot.DateHistogramGroupSource> {
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
DateHistogramGroupSource dateHistogramGroupSource; // = new DateHistogramGroupSource(field);
if (randomBoolean()) {
dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.FixedInterval(
new DateHistogramInterval(randomPositiveTimeValue())));
} else {
dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.CalendarInterval(
new DateHistogramInterval(randomTimeValue(1,1, "m", "h", "d", "w"))));
}
if (randomBoolean()) {
dateHistogramGroupSource.setTimeZone(randomZone());
}
if (randomBoolean()) {
dateHistogramGroupSource.setFormat(randomAlphaOfLength(10));
}
return dateHistogramGroupSource;
}
@Override
protected DateHistogramGroupSource createServerTestInstance() {
return randomDateHistogramGroupSource();
}
@Override
protected org.elasticsearch.client.dataframe.transforms.pivot.DateHistogramGroupSource doParseToClientInstance(XContentParser parser) {
return org.elasticsearch.client.dataframe.transforms.pivot.DateHistogramGroupSource.fromXContent(parser);
}
@Override
protected void assertInstances(DateHistogramGroupSource serverTestInstance,
org.elasticsearch.client.dataframe.transforms.pivot.DateHistogramGroupSource clientInstance) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
assertThat(serverTestInstance.getFormat(), equalTo(clientInstance.getFormat()));
assertSameInterval(serverTestInstance.getInterval(), clientInstance.getInterval());
assertThat(serverTestInstance.getTimeZone(), equalTo(clientInstance.getTimeZone()));
assertThat(serverTestInstance.getType().name(), equalTo(clientInstance.getType().name()));
}
private void assertSameInterval(DateHistogramGroupSource.Interval serverTestInstance,
org.elasticsearch.client.dataframe.transforms.pivot.DateHistogramGroupSource.Interval clientInstance) {
assertEquals(serverTestInstance.getName(), clientInstance.getName());
assertEquals(serverTestInstance.getInterval(), clientInstance.getInterval());
}
}

View File

@@ -8,10 +8,13 @@ package org.elasticsearch.xpack.core.dataframe.transforms.pivot;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import java.io.IOException; import java.io.IOException;
@@ -19,28 +22,186 @@ import java.time.ZoneId;
import java.time.ZoneOffset; import java.time.ZoneOffset;
import java.util.Objects; import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DateHistogramGroupSource extends SingleGroupSource { public class DateHistogramGroupSource extends SingleGroupSource {
private static final int CALENDAR_INTERVAL_ID = 1;
private static final int FIXED_INTERVAL_ID = 0;
/**
* Interval can be specified in 2 ways:
*
* fixed_interval fixed intervals like 1h, 1m, 1d
* calendar_interval calendar aware intervals like 1M, 1Y, ...
*
* Note: data frames do not support the deprecated interval option
*/
public interface Interval extends Writeable, ToXContentFragment {
String getName();
DateHistogramInterval getInterval();
byte getIntervalTypeId();
}
public static class FixedInterval implements Interval {
private static final String NAME = "fixed_interval";
private final DateHistogramInterval interval;
public FixedInterval(DateHistogramInterval interval) {
this.interval = interval;
}
public FixedInterval(StreamInput in) throws IOException {
this.interval = new DateHistogramInterval(in);
}
@Override
public String getName() {
return NAME;
}
@Override
public DateHistogramInterval getInterval() {
return interval;
}
@Override
public byte getIntervalTypeId() {
return FIXED_INTERVAL_ID;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME);
interval.toXContent(builder, params);
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
interval.writeTo(out);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final FixedInterval that = (FixedInterval) other;
return Objects.equals(this.interval, that.interval);
}
@Override
public int hashCode() {
return Objects.hash(interval);
}
}
public static class CalendarInterval implements Interval {
private static final String NAME = "calendar_interval";
private final DateHistogramInterval interval;
public CalendarInterval(DateHistogramInterval interval) {
this.interval = interval;
if (DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()) == null) {
throw new IllegalArgumentException("The supplied interval [" + interval + "] could not be parsed " +
"as a calendar interval.");
}
}
public CalendarInterval(StreamInput in) throws IOException {
this.interval = new DateHistogramInterval(in);
}
@Override
public String getName() {
return NAME;
}
@Override
public DateHistogramInterval getInterval() {
return interval;
}
@Override
public byte getIntervalTypeId() {
return CALENDAR_INTERVAL_ID;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(NAME);
interval.toXContent(builder, params);
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
interval.writeTo(out);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
final CalendarInterval that = (CalendarInterval) other;
return Objects.equals(this.interval, that.interval);
}
@Override
public int hashCode() {
return Objects.hash(interval);
}
}
private Interval readInterval(StreamInput in) throws IOException {
byte id = in.readByte();
switch (id) {
case FIXED_INTERVAL_ID:
return new FixedInterval(in);
case CALENDAR_INTERVAL_ID:
return new CalendarInterval(in);
default:
throw new IllegalArgumentException("unknown interval type [" + id + "]");
}
}
private void writeInterval(Interval interval, StreamOutput out) throws IOException {
out.write(interval.getIntervalTypeId());
interval.writeTo(out);
}
private static final String NAME = "data_frame_date_histogram_group"; private static final String NAME = "data_frame_date_histogram_group";
private static final ParseField TIME_ZONE = new ParseField("time_zone"); private static final ParseField TIME_ZONE = new ParseField("time_zone");
private static final ParseField FORMAT = new ParseField("format"); private static final ParseField FORMAT = new ParseField("format");
private static final ConstructingObjectParser<DateHistogramGroupSource, Void> STRICT_PARSER = createParser(false); private static final ConstructingObjectParser<DateHistogramGroupSource, Void> STRICT_PARSER = createParser(false);
private static final ConstructingObjectParser<DateHistogramGroupSource, Void> LENIENT_PARSER = createParser(true); private static final ConstructingObjectParser<DateHistogramGroupSource, Void> LENIENT_PARSER = createParser(true);
private long interval = 0;
private DateHistogramInterval dateHistogramInterval; private final Interval interval;
private String format; private String format;
private ZoneId timeZone; private ZoneId timeZone;
public DateHistogramGroupSource(String field) { public DateHistogramGroupSource(String field, Interval interval) {
super(field); super(field);
this.interval = interval;
} }
public DateHistogramGroupSource(StreamInput in) throws IOException { public DateHistogramGroupSource(StreamInput in) throws IOException {
super(in); super(in);
this.interval = in.readLong(); this.interval = readInterval(in);
this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
this.timeZone = in.readOptionalZoneId(); this.timeZone = in.readOptionalZoneId();
this.format = in.readOptionalString(); this.format = in.readOptionalString();
} }
@@ -48,24 +209,28 @@ public class DateHistogramGroupSource extends SingleGroupSource {
private static ConstructingObjectParser<DateHistogramGroupSource, Void> createParser(boolean lenient) { private static ConstructingObjectParser<DateHistogramGroupSource, Void> createParser(boolean lenient) {
ConstructingObjectParser<DateHistogramGroupSource, Void> parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> { ConstructingObjectParser<DateHistogramGroupSource, Void> parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> {
String field = (String) args[0]; String field = (String) args[0];
return new DateHistogramGroupSource(field); String fixedInterval = (String) args[1];
String calendarInterval = (String) args[2];
Interval interval = null;
if (fixedInterval != null && calendarInterval != null) {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found both");
} else if (fixedInterval != null) {
interval = new FixedInterval(new DateHistogramInterval(fixedInterval));
} else if (calendarInterval != null) {
interval = new CalendarInterval(new DateHistogramInterval(calendarInterval));
} else {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found none");
}
return new DateHistogramGroupSource(field, interval);
}); });
declareValuesSourceFields(parser); declareValuesSourceFields(parser);
parser.declareField((histogram, interval) -> { parser.declareString(optionalConstructorArg(), new ParseField(FixedInterval.NAME));
if (interval instanceof Long) { parser.declareString(optionalConstructorArg(), new ParseField(CalendarInterval.NAME));
histogram.setInterval((long) interval);
} else {
histogram.setDateHistogramInterval((DateHistogramInterval) interval);
}
}, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
return p.longValue();
} else {
return new DateHistogramInterval(p.text());
}
}, HistogramGroupSource.INTERVAL, ObjectParser.ValueType.LONG);
parser.declareField(DateHistogramGroupSource::setTimeZone, p -> { parser.declareField(DateHistogramGroupSource::setTimeZone, p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) { if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
@@ -88,28 +253,10 @@ public class DateHistogramGroupSource extends SingleGroupSource {
return Type.DATE_HISTOGRAM; return Type.DATE_HISTOGRAM;
} }
public long getInterval() { public Interval getInterval() {
return interval; return interval;
} }
public void setInterval(long interval) {
if (interval < 1) {
throw new IllegalArgumentException("[interval] must be greater than or equal to 1.");
}
this.interval = interval;
}
public DateHistogramInterval getDateHistogramInterval() {
return dateHistogramInterval;
}
public void setDateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
if (dateHistogramInterval == null) {
throw new IllegalArgumentException("[dateHistogramInterval] must not be null");
}
this.dateHistogramInterval = dateHistogramInterval;
}
public String getFormat() { public String getFormat() {
return format; return format;
} }
@@ -129,8 +276,7 @@ public class DateHistogramGroupSource extends SingleGroupSource {
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(field); out.writeOptionalString(field);
out.writeLong(interval); writeInterval(interval, out);
out.writeOptionalWriteable(dateHistogramInterval);
out.writeOptionalZoneId(timeZone); out.writeOptionalZoneId(timeZone);
out.writeOptionalString(format); out.writeOptionalString(format);
} }
@@ -141,11 +287,7 @@ public class DateHistogramGroupSource extends SingleGroupSource {
if (field != null) { if (field != null) {
builder.field(FIELD.getPreferredName(), field); builder.field(FIELD.getPreferredName(), field);
} }
if (dateHistogramInterval == null) { interval.toXContent(builder, params);
builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), interval);
} else {
builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), dateHistogramInterval.toString());
}
if (timeZone != null) { if (timeZone != null) {
builder.field(TIME_ZONE.getPreferredName(), timeZone.toString()); builder.field(TIME_ZONE.getPreferredName(), timeZone.toString());
} }
@@ -170,13 +312,12 @@ public class DateHistogramGroupSource extends SingleGroupSource {
return Objects.equals(this.field, that.field) && return Objects.equals(this.field, that.field) &&
Objects.equals(interval, that.interval) && Objects.equals(interval, that.interval) &&
Objects.equals(dateHistogramInterval, that.dateHistogramInterval) &&
Objects.equals(timeZone, that.timeZone) && Objects.equals(timeZone, that.timeZone) &&
Objects.equals(format, that.format); Objects.equals(format, that.format);
} }
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(field, interval, dateHistogramInterval, timeZone, format); return Objects.hash(field, interval, timeZone, format);
} }
} }

View File

@@ -17,13 +17,15 @@ public class DateHistogramGroupSourceTests extends AbstractSerializingTestCase<D
public static DateHistogramGroupSource randomDateHistogramGroupSource() { public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20); String field = randomAlphaOfLengthBetween(1, 20);
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(field); DateHistogramGroupSource dateHistogramGroupSource;
if (randomBoolean()) { if (randomBoolean()) {
dateHistogramGroupSource.setInterval(randomLongBetween(1, 10_000)); dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.FixedInterval(
new DateHistogramInterval(randomPositiveTimeValue())));
} else { } else {
dateHistogramGroupSource.setDateHistogramInterval(randomFrom(DateHistogramInterval.days(10), dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.CalendarInterval(
DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1))); new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w"))));
} }
if (randomBoolean()) { if (randomBoolean()) {
dateHistogramGroupSource.setTimeZone(randomZone()); dateHistogramGroupSource.setTimeZone(randomZone());
} }

View File

@@ -135,24 +135,27 @@ abstract class DataFrameIntegTestCase extends ESRestTestCase {
TimeUnit.MILLISECONDS); TimeUnit.MILLISECONDS);
} }
protected DateHistogramGroupSource createDateHistogramGroupSource(String field, long interval, ZoneId zone, String format) { protected DateHistogramGroupSource createDateHistogramGroupSourceWithFixedInterval(String field,
DateHistogramInterval interval,
ZoneId zone,
String format) {
DateHistogramGroupSource.Builder builder = DateHistogramGroupSource.builder() DateHistogramGroupSource.Builder builder = DateHistogramGroupSource.builder()
.setField(field) .setField(field)
.setFormat(format) .setInterval(new DateHistogramGroupSource.FixedInterval(interval))
.setInterval(interval) .setTimeZone(zone)
.setTimeZone(zone); .setFormat(format);
return builder.build(); return builder.build();
} }
protected DateHistogramGroupSource createDateHistogramGroupSource(String field, protected DateHistogramGroupSource createDateHistogramGroupSourceWithCalendarInterval(String field,
DateHistogramInterval interval, DateHistogramInterval interval,
ZoneId zone, ZoneId zone,
String format) { String format) {
DateHistogramGroupSource.Builder builder = DateHistogramGroupSource.builder() DateHistogramGroupSource.Builder builder = DateHistogramGroupSource.builder()
.setField(field) .setField(field)
.setFormat(format) .setInterval(new DateHistogramGroupSource.CalendarInterval(interval))
.setDateHistgramInterval(interval) .setTimeZone(zone)
.setTimeZone(zone); .setFormat(format);
return builder.build(); return builder.build();
} }

View File

@ -35,7 +35,7 @@ public class DataFrameTransformIT extends DataFrameIntegTestCase {
createReviewsIndex(); createReviewsIndex();
Map<String, SingleGroupSource> groups = new HashMap<>(); Map<String, SingleGroupSource> groups = new HashMap<>();
groups.put("by-day", createDateHistogramGroupSource("timestamp", DateHistogramInterval.DAY, null, null)); groups.put("by-day", createDateHistogramGroupSourceWithCalendarInterval("timestamp", DateHistogramInterval.DAY, null, null));
groups.put("by-user", TermsGroupSource.builder().setField("user_id").build()); groups.put("by-user", TermsGroupSource.builder().setField("user_id").build());
groups.put("by-business", TermsGroupSource.builder().setField("business_id").build()); groups.put("by-business", TermsGroupSource.builder().setField("business_id").build());
@ -49,10 +49,8 @@ public class DataFrameTransformIT extends DataFrameIntegTestCase {
"reviews-by-user-business-day", "reviews-by-user-business-day",
REVIEWS_INDEX_NAME); REVIEWS_INDEX_NAME);
final RequestOptions options = assertTrue(putDataFrameTransform(config, RequestOptions.DEFAULT).isAcknowledged());
expectWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."); assertTrue(startDataFrameTransform(config.getId(), RequestOptions.DEFAULT).isStarted());
assertTrue(putDataFrameTransform(config, options).isAcknowledged());
assertTrue(startDataFrameTransform(config.getId(), options).isStarted());
waitUntilCheckpoint(config.getId(), 1L); waitUntilCheckpoint(config.getId(), 1L);

View File

@ -218,7 +218,7 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase {
+ " \"group_by\": {" + " \"group_by\": {"
+ " \"by_hr\": {" + " \"by_hr\": {"
+ " \"date_histogram\": {" + " \"date_histogram\": {"
+ " \"interval\": \"1h\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-DD_HH\"" + " \"fixed_interval\": \"1h\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd_HH\""
+ " } } }," + " } } },"
+ " \"aggregations\": {" + " \"aggregations\": {"
+ " \"avg_rating\": {" + " \"avg_rating\": {"
@ -228,14 +228,11 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase {
+ "}"; + "}";
createDataframeTransformRequest.setJsonEntity(config); createDataframeTransformRequest.setJsonEntity(config);
createDataframeTransformRequest.setOptions(expectWarnings("[interval] on [date_histogram] is deprecated, " +
"use [fixed_interval] or [calendar_interval] in the future."));
Map<String, Object> createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); Map<String, Object> createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest));
assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS, startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS);
"[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.");
assertTrue(indexExists(dataFrameIndex)); assertTrue(indexExists(dataFrameIndex));
Map<String, Object> indexStats = getAsMap(dataFrameIndex + "/_stats"); Map<String, Object> indexStats = getAsMap(dataFrameIndex + "/_stats");
@ -255,7 +252,7 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase {
config += " \"pivot\": {" config += " \"pivot\": {"
+ " \"group_by\": {" + " \"group_by\": {"
+ " \"reviewer\": {\"terms\": { \"field\": \"user_id\" }}," + " \"reviewer\": {\"terms\": { \"field\": \"user_id\" }},"
+ " \"by_day\": {\"date_histogram\": {\"interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-DD\"}}}," + " \"by_day\": {\"date_histogram\": {\"fixed_interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd\"}}},"
+ " \"aggregations\": {" + " \"aggregations\": {"
+ " \"avg_rating\": {" + " \"avg_rating\": {"
+ " \"avg\": {" + " \"avg\": {"
@ -263,8 +260,6 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase {
+ " } } } }" + " } } } }"
+ "}"; + "}";
createPreviewRequest.setJsonEntity(config); createPreviewRequest.setJsonEntity(config);
createPreviewRequest.setOptions(expectWarnings("[interval] on [date_histogram] is deprecated, " +
"use [fixed_interval] or [calendar_interval] in the future."));
Map<String, Object> previewDataframeResponse = entityAsMap(client().performRequest(createPreviewRequest)); Map<String, Object> previewDataframeResponse = entityAsMap(client().performRequest(createPreviewRequest));
List<Map<String, Object>> preview = (List<Map<String, Object>>)previewDataframeResponse.get("preview"); List<Map<String, Object>> preview = (List<Map<String, Object>>)previewDataframeResponse.get("preview");
@ -292,7 +287,7 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase {
config +=" \"pivot\": { \n" + config +=" \"pivot\": { \n" +
" \"group_by\": {\n" + " \"group_by\": {\n" +
" \"by_day\": {\"date_histogram\": {\n" + " \"by_day\": {\"date_histogram\": {\n" +
" \"interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-DD\"\n" + " \"fixed_interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-dd\"\n" +
" }}\n" + " }}\n" +
" },\n" + " },\n" +
" \n" + " \n" +
@ -307,13 +302,11 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase {
+ "}"; + "}";
createDataframeTransformRequest.setJsonEntity(config); createDataframeTransformRequest.setJsonEntity(config);
createDataframeTransformRequest.setOptions(expectWarnings("[interval] on [date_histogram] is deprecated, " +
"use [fixed_interval] or [calendar_interval] in the future."));
Map<String, Object> createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); Map<String, Object> createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest));
assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));
startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS, startAndWaitForTransform(transformId, dataFrameIndex, BASIC_AUTH_VALUE_DATA_FRAME_ADMIN_WITH_SOME_DATA_ACCESS);
"[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future.");
assertTrue(indexExists(dataFrameIndex)); assertTrue(indexExists(dataFrameIndex));
// we expect 21 documents as there shall be 21 days worth of docs // we expect 21 documents as there shall be 21 days worth of docs

View File

@ -67,12 +67,7 @@ setup:
--- ---
"Test preview transform": "Test preview transform":
- skip:
reason: date histo interval is deprecated
features: "warnings"
- do: - do:
warnings:
- "[interval] on [date_histogram] is deprecated, use [fixed_interval] or [calendar_interval] in the future."
data_frame.preview_data_frame_transform: data_frame.preview_data_frame_transform:
body: > body: >
{ {
@ -80,7 +75,7 @@ setup:
"pivot": { "pivot": {
"group_by": { "group_by": {
"airline": {"terms": {"field": "airline"}}, "airline": {"terms": {"field": "airline"}},
"by-hour": {"date_histogram": {"interval": "1h", "field": "time", "format": "yyyy-MM-DD HH"}}}, "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-dd HH"}}},
"aggs": { "aggs": {
"avg_response": {"avg": {"field": "responsetime"}}, "avg_response": {"avg": {"field": "responsetime"}},
"time.max": {"max": {"field": "time"}}, "time.max": {"max": {"field": "time"}},
@ -89,17 +84,17 @@ setup:
} }
} }
- match: { preview.0.airline: foo } - match: { preview.0.airline: foo }
- match: { preview.0.by-hour: "2017-02-49 00" } - match: { preview.0.by-hour: "2017-02-18 00" }
- match: { preview.0.avg_response: 1.0 } - match: { preview.0.avg_response: 1.0 }
- match: { preview.0.time.max: "2017-02-18T00:30:00.000Z" } - match: { preview.0.time.max: "2017-02-18T00:30:00.000Z" }
- match: { preview.0.time.min: "2017-02-18T00:00:00.000Z" } - match: { preview.0.time.min: "2017-02-18T00:00:00.000Z" }
- match: { preview.1.airline: bar } - match: { preview.1.airline: bar }
- match: { preview.1.by-hour: "2017-02-49 01" } - match: { preview.1.by-hour: "2017-02-18 01" }
- match: { preview.1.avg_response: 42.0 } - match: { preview.1.avg_response: 42.0 }
- match: { preview.1.time.max: "2017-02-18T01:00:00.000Z" } - match: { preview.1.time.max: "2017-02-18T01:00:00.000Z" }
- match: { preview.1.time.min: "2017-02-18T01:00:00.000Z" } - match: { preview.1.time.min: "2017-02-18T01:00:00.000Z" }
- match: { preview.2.airline: foo } - match: { preview.2.airline: foo }
- match: { preview.2.by-hour: "2017-02-49 01" } - match: { preview.2.by-hour: "2017-02-18 01" }
- match: { preview.2.avg_response: 42.0 } - match: { preview.2.avg_response: 42.0 }
- match: { preview.2.time.max: "2017-02-18T01:01:00.000Z" } - match: { preview.2.time.max: "2017-02-18T01:01:00.000Z" }
- match: { preview.2.time.min: "2017-02-18T01:01:00.000Z" } - match: { preview.2.time.min: "2017-02-18T01:01:00.000Z" }
@ -128,7 +123,7 @@ setup:
"pivot": { "pivot": {
"group_by": { "group_by": {
"airline": {"terms": {"field": "airline"}}, "airline": {"terms": {"field": "airline"}},
"by-hour": {"date_histogram": {"interval": "1h", "field": "time", "format": "yyyy-MM-DD HH"}}}, "by-hour": {"date_histogram": {"fixed_interval": "1h", "field": "time", "format": "yyyy-MM-dd HH"}}},
"aggs": {"avg_response": {"avg": {"field": "responsetime"}}} "aggs": {"avg_response": {"avg": {"field": "responsetime"}}}
} }
} }