[ML] data frame, adding builder classes for complex config classes (#41638) (#41704)

* [ML] data frame, adding builder classes for complex config classes

* Addressing PR comments, adding some java docs

* cleaning up constructor

* fixing indentation

* change constructors to be package-private
Benjamin Trent 2019-05-01 06:44:29 -05:00 committed by GitHub
parent 26c72c96bd
commit bc333a5cbf
12 changed files with 460 additions and 124 deletions


@ -87,7 +87,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
return new DataFrameTransformConfig(null, source, null, pivotConfig, null);
}
public DataFrameTransformConfig(final String id,
DataFrameTransformConfig(final String id,
final SourceConfig source,
final DestConfig dest,
final PivotConfig pivotConfig,
@ -170,4 +170,46 @@ public class DataFrameTransformConfig implements ToXContentObject {
public String toString() {
return Strings.toString(this, true, true);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String id;
private SourceConfig source;
private DestConfig dest;
private PivotConfig pivotConfig;
private String description;
public Builder setId(String id) {
this.id = id;
return this;
}
public Builder setSource(SourceConfig source) {
this.source = source;
return this;
}
public Builder setDest(DestConfig dest) {
this.dest = dest;
return this;
}
public Builder setPivotConfig(PivotConfig pivotConfig) {
this.pivotConfig = pivotConfig;
return this;
}
public Builder setDescription(String description) {
this.description = description;
return this;
}
public DataFrameTransformConfig build() {
return new DataFrameTransformConfig(id, source, dest, pivotConfig, description);
}
}
}
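For reference, a minimal usage sketch of the new fluent builder. The id, index, destination, and description values are placeholders, and the PivotConfig is assumed to have been built separately (for example via PivotConfig.builder(), shown further down in this diff):

    // Assemble a transform config without touching the now package-private constructor.
    DataFrameTransformConfig config = DataFrameTransformConfig.builder()
        .setId("example-transform")                      // transform id (placeholder)
        .setSource(SourceConfig.builder()
            .setIndex("example-source-index")            // one or more source indices
            .setQuery(new MatchAllQueryBuilder())        // optional; defaults to a match_all query
            .build())
        .setDest(new DestConfig("example-dest-index"))   // destination index
        .setPivotConfig(pivotConfig)                     // a PivotConfig built elsewhere
        .setDescription("optional free-text description")
        .build();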


@ -23,6 +23,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.util.Arrays;
@ -76,7 +77,7 @@ public class SourceConfig implements ToXContentObject {
* @param index Any number of indices. At least one non-null, non-empty index should be provided
* @param queryConfig A QueryConfig object that contains the desired query. Defaults to MatchAll query.
*/
public SourceConfig(String[] index, QueryConfig queryConfig) {
SourceConfig(String[] index, QueryConfig queryConfig) {
this.index = index;
this.queryConfig = queryConfig;
}
@ -121,4 +122,46 @@ public class SourceConfig implements ToXContentObject {
int hash = Arrays.hashCode(index);
return 31 * hash + (queryConfig == null ? 0 : queryConfig.hashCode());
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String[] index;
private QueryConfig queryConfig;
/**
* Sets the indices from which to fetch data
* @param index The indices from which to fetch data
* @return The {@link Builder} with indices set
*/
public Builder setIndex(String... index) {
this.index = index;
return this;
}
/**
* Sets the {@link QueryConfig} object that references the desired query to use when fetching the data
* @param queryConfig The {@link QueryConfig} to use when fetching data
* @return The {@link Builder} with queryConfig set
*/
public Builder setQueryConfig(QueryConfig queryConfig) {
this.queryConfig = queryConfig;
return this;
}
/**
* Sets the query to use when fetching the data. Convenience method for {@link #setQueryConfig(QueryConfig)}
* @param query The {@link QueryBuilder} to use when fetching data (overwrites the {@link QueryConfig})
* @return The {@link Builder} with queryConfig set
*/
public Builder setQuery(QueryBuilder query) {
return this.setQueryConfig(new QueryConfig(query));
}
public SourceConfig build() {
return new SourceConfig(index, queryConfig);
}
}
}
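A short sketch of the SourceConfig builder, showing the setQuery convenience method next to the equivalent explicit setQueryConfig call (index names are placeholders):

    // Convenience form: the QueryBuilder is wrapped in a QueryConfig internally.
    SourceConfig source = SourceConfig.builder()
        .setIndex("index-a", "index-b")                  // varargs; at least one index
        .setQuery(new MatchAllQueryBuilder())
        .build();

    // Equivalent explicit form.
    SourceConfig sameSource = SourceConfig.builder()
        .setIndex("index-a", "index-b")
        .setQueryConfig(new QueryConfig(new MatchAllQueryBuilder()))
        .build();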


@ -20,6 +20,7 @@
package org.elasticsearch.client.dataframe.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
@ -34,30 +35,41 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* A grouping via a date histogram aggregation referencing a time field
*/
public class DateHistogramGroupSource extends SingleGroupSource implements ToXContentObject {
private static final ParseField TIME_ZONE = new ParseField("time_zone");
private static final ParseField FORMAT = new ParseField("format");
private static final ConstructingObjectParser<DateHistogramGroupSource, Void> PARSER =
new ConstructingObjectParser<>("date_histogram_group_source", true, (args) -> new DateHistogramGroupSource((String) args[0]));
new ConstructingObjectParser<>("date_histogram_group_source",
true,
(args) -> {
String field = (String)args[0];
long interval = 0;
DateHistogramInterval dateHistogramInterval = null;
if (args[1] instanceof Long) {
interval = (Long)args[1];
} else {
dateHistogramInterval = (DateHistogramInterval) args[1];
}
ZoneId zoneId = (ZoneId) args[2];
String format = (String) args[3];
return new DateHistogramGroupSource(field, interval, dateHistogramInterval, format, zoneId);
});
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
PARSER.declareField((histogram, interval) -> {
if (interval instanceof Long) {
histogram.setInterval((long) interval);
} else {
histogram.setDateHistogramInterval((DateHistogramInterval) interval);
}
}, p -> {
PARSER.declareField(optionalConstructorArg(), p -> {
if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) {
return p.longValue();
} else {
return new DateHistogramInterval(p.text());
}
}, HistogramGroupSource.INTERVAL, ObjectParser.ValueType.LONG);
PARSER.declareField(DateHistogramGroupSource::setTimeZone, p -> {
PARSER.declareField(optionalConstructorArg(), p -> {
if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
return ZoneId.of(p.text());
} else {
@ -65,20 +77,24 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
}
}, TIME_ZONE, ObjectParser.ValueType.LONG);
PARSER.declareString(DateHistogramGroupSource::setFormat, FORMAT);
PARSER.declareString(optionalConstructorArg(), FORMAT);
}
public static DateHistogramGroupSource fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
private long interval = 0;
private DateHistogramInterval dateHistogramInterval;
private String format;
private ZoneId timeZone;
private final long interval;
private final DateHistogramInterval dateHistogramInterval;
private final String format;
private final ZoneId timeZone;
public DateHistogramGroupSource(String field) {
DateHistogramGroupSource(String field, long interval, DateHistogramInterval dateHistogramInterval, String format, ZoneId timeZone) {
super(field);
this.interval = interval;
this.dateHistogramInterval = dateHistogramInterval;
this.format = format;
this.timeZone = timeZone;
}
@Override
@ -90,40 +106,18 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
return interval;
}
public void setInterval(long interval) {
if (interval < 1) {
throw new IllegalArgumentException("[interval] must be greater than or equal to 1.");
}
this.interval = interval;
}
public DateHistogramInterval getDateHistogramInterval() {
return dateHistogramInterval;
}
public void setDateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
if (dateHistogramInterval == null) {
throw new IllegalArgumentException("[dateHistogramInterval] must not be null");
}
this.dateHistogramInterval = dateHistogramInterval;
}
public String getFormat() {
return format;
}
public void setFormat(String format) {
this.format = format;
}
public ZoneId getTimeZone() {
return timeZone;
}
public void setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
@ -168,4 +162,88 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
public int hashCode() {
return Objects.hash(field, interval, dateHistogramInterval, timeZone, format);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String field;
private long interval = 0;
private DateHistogramInterval dateHistogramInterval;
private String format;
private ZoneId timeZone;
/**
* The field with which to construct the date histogram grouping
* @param field The field name
* @return The {@link Builder} with the field set.
*/
public Builder setField(String field) {
this.field = field;
return this;
}
/**
* Set the interval for the DateHistogram grouping
* @param interval the time interval in milliseconds
* @return the {@link Builder} with the interval set.
*/
public Builder setInterval(long interval) {
if (interval < 1) {
throw new IllegalArgumentException("[interval] must be greater than or equal to 1.");
}
this.interval = interval;
return this;
}
/**
* Set the interval for the DateHistogram grouping
* @param timeValue The time value to use as the interval
* @return the {@link Builder} with the interval set.
*/
public Builder setInterval(TimeValue timeValue) {
return setInterval(timeValue.getMillis());
}
/**
* Sets the interval of the DateHistogram grouping
*
* If a DateHistogramInterval is set, it supersedes the numeric interval returned by {@link DateHistogramGroupSource#getInterval()}
* @param dateHistogramInterval the DateHistogramInterval to set
* @return The {@link Builder} with the dateHistogramInterval set.
*/
public Builder setDateHistgramInterval(DateHistogramInterval dateHistogramInterval) {
if (dateHistogramInterval == null) {
throw new IllegalArgumentException("[dateHistogramInterval] must not be null");
}
this.dateHistogramInterval = dateHistogramInterval;
return this;
}
/**
* Set the optional String formatting for the time interval.
* @param format The format of the output for the time interval key
* @return The {@link Builder} with the format set.
*/
public Builder setFormat(String format) {
this.format = format;
return this;
}
/**
* Sets the time zone to use for this aggregation
* @param timeZone The zoneId for the timeZone
* @return The {@link Builder} with the timeZone set.
*/
public Builder setTimeZone(ZoneId timeZone) {
this.timeZone = timeZone;
return this;
}
public DateHistogramGroupSource build() {
return new DateHistogramGroupSource(field, interval, dateHistogramInterval, format, timeZone);
}
}
}
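A sketch of the now-immutable DateHistogramGroupSource assembled through its builder; the field name, interval, format, and time zone are illustrative:

    DateHistogramGroupSource dateGroup = DateHistogramGroupSource.builder()
        .setField("timestamp")                           // date field to bucket on
        .setInterval(TimeValue.timeValueHours(1))        // fixed interval, stored as milliseconds
        .setFormat("yyyy-MM-dd'T'HH")                    // optional format for the bucket keys
        .setTimeZone(ZoneId.of("UTC"))                   // optional time zone
        .build();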


@ -26,12 +26,16 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
* Class describing how to group data
*/
public class GroupConfig implements ToXContentObject {
private final Map<String, SingleGroupSource> groups;
@ -126,7 +130,7 @@ public class GroupConfig implements ToXContentObject {
} while (endObjectCount != 0);
}
public GroupConfig(Map<String, SingleGroupSource> groups) {
GroupConfig(Map<String, SingleGroupSource> groups) {
this.groups = groups;
}
@ -174,4 +178,27 @@ public class GroupConfig implements ToXContentObject {
public String toString() {
return Strings.toString(this, true, true);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private final Map<String, SingleGroupSource> groups = new HashMap<>();
/**
* Add a new grouping to the builder
* @param name The name of the resulting grouped field
* @param group The {@link SingleGroupSource} defining how the data is grouped
* @return The {@link Builder} with a new grouping entry added
*/
public Builder groupBy(String name, SingleGroupSource group) {
groups.put(name, group);
return this;
}
public GroupConfig build() {
return new GroupConfig(groups);
}
}
}
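A sketch combining the GroupConfig builder with the group-source builders added in this change (group names and fields are placeholders):

    GroupConfig groups = GroupConfig.builder()
        .groupBy("reviewer", TermsGroupSource.builder()          // terms grouping
            .setField("user_id")
            .build())
        .groupBy("hour", DateHistogramGroupSource.builder()      // date histogram grouping
            .setField("timestamp")
            .setInterval(TimeValue.timeValueHours(1))
            .build())
        .build();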


@ -31,6 +31,9 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* A grouping via a histogram aggregation referencing a numeric field
*/
public class HistogramGroupSource extends SingleGroupSource implements ToXContentObject {
protected static final ParseField INTERVAL = new ParseField("interval");
@ -49,7 +52,7 @@ public class HistogramGroupSource extends SingleGroupSource implements ToXConten
private final double interval;
public HistogramGroupSource(String field, double interval) {
HistogramGroupSource(String field, double interval) {
super(field);
if (interval <= 0) {
throw new IllegalArgumentException("[interval] must be greater than 0.");
@ -97,4 +100,38 @@ public class HistogramGroupSource extends SingleGroupSource implements ToXConten
public int hashCode() {
return Objects.hash(field, interval);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String field;
private double interval;
/**
* The field to reference in the histogram grouping
* @param field The numeric field name to use in the histogram grouping
* @return The {@link Builder} with the field set.
*/
public Builder setField(String field) {
this.field = field;
return this;
}
/**
* Set the interval for the histogram aggregation
* @param interval The numeric interval for the histogram grouping
* @return The {@link Builder} with the interval set.
*/
public Builder setInterval(double interval) {
this.interval = interval;
return this;
}
public HistogramGroupSource build() {
return new HistogramGroupSource(field, interval);
}
}
}
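A sketch of the histogram grouping builder (field and interval values are placeholders; the constructor still rejects intervals that are not greater than 0):

    HistogramGroupSource priceBuckets = HistogramGroupSource.builder()
        .setField("price")       // numeric field to bucket
        .setInterval(5.0)        // bucket width; must be greater than 0
        .build();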


@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import java.io.IOException;
import java.util.Objects;
@ -31,6 +32,9 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Class describing how to pivot data via {@link GroupConfig} and {@link AggregationConfig} objects
*/
public class PivotConfig implements ToXContentObject {
private static final ParseField GROUP_BY = new ParseField("group_by");
@ -51,7 +55,7 @@ public class PivotConfig implements ToXContentObject {
return PARSER.apply(parser, null);
}
public PivotConfig(GroupConfig groups, final AggregationConfig aggregationConfig) {
PivotConfig(GroupConfig groups, final AggregationConfig aggregationConfig) {
this.groups = groups;
this.aggregationConfig = aggregationConfig;
}
@ -96,4 +100,47 @@ public class PivotConfig implements ToXContentObject {
public boolean isValid() {
return groups.isValid() && aggregationConfig.isValid();
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private GroupConfig groups;
private AggregationConfig aggregationConfig;
/**
* Set how to group the source data
* @param groups The configuration describing how to group and pivot the source data
* @return the {@link Builder} with the groups set.
*/
public Builder setGroups(GroupConfig groups) {
this.groups = groups;
return this;
}
/**
* Set the aggregated fields to include in the pivot config
* @param aggregationConfig The configuration describing the aggregated fields
* @return the {@link Builder} with the aggregations set.
*/
public Builder setAggregationConfig(AggregationConfig aggregationConfig) {
this.aggregationConfig = aggregationConfig;
return this;
}
/**
* Set the aggregated fields to include in the pivot config
* @param aggregations The aggregated field builders
* @return the {@link Builder} with the aggregations set.
*/
public Builder setAggregations(AggregatorFactories.Builder aggregations) {
this.aggregationConfig = new AggregationConfig(aggregations);
return this;
}
public PivotConfig build() {
return new PivotConfig(groups, aggregationConfig);
}
}
}
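A sketch of the PivotConfig builder, using the setAggregations shortcut that wraps an AggregatorFactories.Builder in an AggregationConfig (group and aggregation names are placeholders):

    AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
    aggs.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));

    PivotConfig pivot = PivotConfig.builder()
        .setGroups(GroupConfig.builder()
            .groupBy("reviewer", TermsGroupSource.builder().setField("user_id").build())
            .build())
        .setAggregations(aggs)   // equivalent to setAggregationConfig(new AggregationConfig(aggs))
        .build();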


@ -42,7 +42,7 @@ public class TermsGroupSource extends SingleGroupSource implements ToXContentObj
return PARSER.apply(parser, null);
}
public TermsGroupSource(final String field) {
TermsGroupSource(final String field) {
super(field);
}
@ -60,4 +60,27 @@ public class TermsGroupSource extends SingleGroupSource implements ToXContentObj
builder.endObject();
return builder;
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String field;
/**
* The field with which to construct the terms grouping
* @param field The field name
* @return The {@link Builder} with the field set.
*/
public Builder setField(String field) {
this.field = field;
return this;
}
public TermsGroupSource build() {
return new TermsGroupSource(field);
}
}
}
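A one-line sketch of the terms grouping builder (the field name is a placeholder):

    TermsGroupSource byUser = TermsGroupSource.builder()
        .setField("user_id")     // field whose terms define the groups
        .build();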


@ -43,9 +43,7 @@ import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig;
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformStateAndStats;
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformTaskState;
import org.elasticsearch.client.dataframe.transforms.DestConfig;
import org.elasticsearch.client.dataframe.transforms.QueryConfig;
import org.elasticsearch.client.dataframe.transforms.SourceConfig;
import org.elasticsearch.client.dataframe.transforms.pivot.AggregationConfig;
import org.elasticsearch.client.dataframe.transforms.pivot.GroupConfig;
import org.elasticsearch.client.dataframe.transforms.pivot.PivotConfig;
import org.elasticsearch.client.dataframe.transforms.pivot.TermsGroupSource;
@ -61,7 +59,6 @@ import org.junit.After;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -300,20 +297,21 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
}
private DataFrameTransformConfig validDataFrameTransformConfig(String id, String source, String destination) {
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregations(aggBuilder).build();
DestConfig destConfig = (destination != null) ? new DestConfig(destination) : null;
return new DataFrameTransformConfig(id,
new SourceConfig(new String[]{source}, queryConfig),
destConfig,
pivotConfig,
"this is a test transform");
return DataFrameTransformConfig.builder()
.setId(id)
.setSource(SourceConfig.builder().setIndex(source).setQuery(new MatchAllQueryBuilder()).build())
.setDest(destConfig)
.setPivotConfig(pivotConfig)
.setDescription("this is a test transform")
.build();
}
public void testGetStats() throws Exception {
@ -321,19 +319,20 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
createIndex(sourceIndex);
indexData(sourceIndex);
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregations(aggBuilder).build();
String id = "test-get-stats";
DataFrameTransformConfig transform = new DataFrameTransformConfig(id,
new SourceConfig(new String[]{sourceIndex}, queryConfig),
new DestConfig("pivot-dest"),
pivotConfig,
"transform for testing stats");
DataFrameTransformConfig transform = DataFrameTransformConfig.builder()
.setId(id)
.setSource(SourceConfig.builder().setIndex(sourceIndex).setQuery(new MatchAllQueryBuilder()).build())
.setDest(new DestConfig("pivot-dest"))
.setPivotConfig(pivotConfig)
.setDescription("transform for testing stats")
.build();
DataFrameClient client = highLevelClient().dataFrame();
AcknowledgedResponse ack = execute(new PutDataFrameTransformRequest(transform), client::putDataFrameTransform,


@ -70,7 +70,7 @@ public class PreviewDataFrameTransformRequestTests extends AbstractXContentTestC
assertFalse(new PreviewDataFrameTransformRequest(config).validate().isPresent());
// null source is not valid
config = new DataFrameTransformConfig(null, null, null, PivotConfigTests.randomPivotConfig(), null);
config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
Optional<ValidationException> error = new PreviewDataFrameTransformRequest(config).validate();
assertTrue(error.isPresent());


@ -40,7 +40,7 @@ public class PutDataFrameTransformRequestTests extends AbstractXContentTestCase<
public void testValidate() {
assertFalse(createTestInstance().validate().isPresent());
DataFrameTransformConfig config = new DataFrameTransformConfig(null, null, null, PivotConfigTests.randomPivotConfig(), null);
DataFrameTransformConfig config = DataFrameTransformConfig.builder().setPivotConfig(PivotConfigTests.randomPivotConfig()).build();
Optional<ValidationException> error = new PutDataFrameTransformRequest(config).validate();
assertTrue(error.isPresent());


@ -29,20 +29,13 @@ public class DateHistogramGroupSourceTests extends AbstractXContentTestCase<Date
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(field);
if (randomBoolean()) {
dateHistogramGroupSource.setInterval(randomLongBetween(1, 10_000));
} else {
dateHistogramGroupSource.setDateHistogramInterval(randomFrom(DateHistogramInterval.days(10),
DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1)));
}
if (randomBoolean()) {
dateHistogramGroupSource.setTimeZone(randomZone());
}
if (randomBoolean()) {
dateHistogramGroupSource.setFormat(randomAlphaOfLength(10));
}
return dateHistogramGroupSource;
boolean setInterval = randomBoolean();
return new DateHistogramGroupSource(field,
setInterval ? randomLongBetween(1, 10_000) : 0,
setInterval ? null : randomFrom(DateHistogramInterval.days(10),
DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1)),
randomBoolean() ? randomAlphaOfLength(10) : null,
randomBoolean() ? randomZone() : null);
}
@Override


@ -61,7 +61,6 @@ import org.junit.After;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@ -119,13 +118,15 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
// end::put-data-frame-transform-query-config
// tag::put-data-frame-transform-source-config
SourceConfig sourceConfig =
new SourceConfig(new String[]{"source-index"}, queryConfig);
SourceConfig sourceConfig = SourceConfig.builder()
.setIndex("source-index")
.setQueryConfig(queryConfig).build();
// end::put-data-frame-transform-source-config
// tag::put-data-frame-transform-group-config
GroupConfig groupConfig =
new GroupConfig(Collections.singletonMap("reviewer", // <1>
new TermsGroupSource("user_id"))); // <2>
GroupConfig groupConfig = GroupConfig.builder()
.groupBy("reviewer", // <1>
TermsGroupSource.builder().setField("user_id").build()) // <2>
.build();
// end::put-data-frame-transform-group-config
// tag::put-data-frame-transform-agg-config
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
@ -134,15 +135,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
// end::put-data-frame-transform-agg-config
// tag::put-data-frame-transform-pivot-config
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder()
.setGroups(groupConfig)
.setAggregationConfig(aggConfig)
.build();
// end::put-data-frame-transform-pivot-config
// tag::put-data-frame-transform-config
DataFrameTransformConfig transformConfig =
new DataFrameTransformConfig("reviewer-avg-rating", // <1>
sourceConfig, // <2>
new DestConfig("pivot-destination"), // <3>
pivotConfig, // <4>
"This is my test transform"); // <5>
DataFrameTransformConfig transformConfig = DataFrameTransformConfig
.builder()
.setId("reviewer-avg-rating") // <1>
.setSource(sourceConfig) // <2>
.setDest(new DestConfig("pivot-destination")) // <3>
.setPivotConfig(pivotConfig) // <4>
.setDescription("This is my test transform") // <5>
.build();
// end::put-data-frame-transform-config
{
@ -160,9 +166,12 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
assertTrue(response.isAcknowledged());
}
{
DataFrameTransformConfig configWithDifferentId = new DataFrameTransformConfig("reviewer-avg-rating2",
transformConfig.getSource(), transformConfig.getDestination(),
transformConfig.getPivotConfig(), null);
DataFrameTransformConfig configWithDifferentId = DataFrameTransformConfig.builder()
.setId("reviewer-avg-rating2")
.setSource(transformConfig.getSource())
.setDest(transformConfig.getDestination())
.setPivotConfig(transformConfig.getPivotConfig())
.build();
PutDataFrameTransformRequest request = new PutDataFrameTransformRequest(configWithDifferentId);
// tag::put-data-frame-transform-execute-listener
@ -199,14 +208,19 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
RestHighLevelClient client = highLevelClient();
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
DataFrameTransformConfig transformConfig = new DataFrameTransformConfig("mega-transform",
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest"), pivotConfig, null);
DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
.setId("mega-transform")
.setSource(SourceConfig.builder().setIndex("source-data").setQueryConfig(queryConfig).build())
.setDest(new DestConfig("pivot-dest"))
.setPivotConfig(pivotConfig)
.build();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
transformsToClean.add(transformConfig.getId());
@ -313,17 +327,31 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
RestHighLevelClient client = highLevelClient();
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
DataFrameTransformConfig transformConfig1 = new DataFrameTransformConfig("mega-transform",
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest"), pivotConfig, null);
DataFrameTransformConfig transformConfig2 = new DataFrameTransformConfig("mega-transform2",
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("pivot-dest2"), pivotConfig, null);
DataFrameTransformConfig transformConfig1 = DataFrameTransformConfig.builder()
.setId("mega-transform")
.setSource(SourceConfig.builder()
.setIndex("source-data")
.setQuery(new MatchAllQueryBuilder())
.build())
.setDest(new DestConfig("pivot-dest"))
.setPivotConfig(pivotConfig)
.build();
DataFrameTransformConfig transformConfig2 = DataFrameTransformConfig.builder()
.setId("mega-transform2")
.setSource(SourceConfig.builder()
.setIndex("source-data")
.setQuery(new MatchAllQueryBuilder())
.build())
.setDest(new DestConfig("pivot-dest2"))
.setPivotConfig(pivotConfig)
.build();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig1), RequestOptions.DEFAULT);
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig2), RequestOptions.DEFAULT);
@ -379,16 +407,20 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
RestHighLevelClient client = highLevelClient();
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
// tag::preview-data-frame-transform-request
DataFrameTransformConfig transformConfig =
DataFrameTransformConfig.forPreview(
new SourceConfig(new String[]{"source-data"}, queryConfig), // <1>
SourceConfig.builder()
.setIndex("source-data")
.setQueryConfig(queryConfig)
.build(), // <1>
pivotConfig); // <2>
PreviewDataFrameTransformRequest request =
new PreviewDataFrameTransformRequest(transformConfig); // <3>
@ -438,15 +470,23 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
RestHighLevelClient client = highLevelClient();
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
String id = "statisitcal-transform";
DataFrameTransformConfig transformConfig = new DataFrameTransformConfig(id,
new SourceConfig(new String[]{"source-data"}, queryConfig), new DestConfig("dest"), pivotConfig, null);
DataFrameTransformConfig transformConfig = DataFrameTransformConfig.builder()
.setId(id)
.setSource(SourceConfig.builder()
.setIndex("source-data")
.setQuery(new MatchAllQueryBuilder())
.build())
.setDest(new DestConfig("pivot-dest"))
.setPivotConfig(pivotConfig)
.build();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(transformConfig), RequestOptions.DEFAULT);
// tag::get-data-frame-transform-stats-request
@ -516,16 +556,23 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
createIndex("source-data");
QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
GroupConfig groupConfig = new GroupConfig(Collections.singletonMap("reviewer", new TermsGroupSource("user_id")));
GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
TermsGroupSource.builder().setField("user_id").build()).build();
AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
aggBuilder.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
AggregationConfig aggConfig = new AggregationConfig(aggBuilder);
PivotConfig pivotConfig = new PivotConfig(groupConfig, aggConfig);
PivotConfig pivotConfig = PivotConfig.builder().setGroups(groupConfig).setAggregationConfig(aggConfig).build();
DataFrameTransformConfig putTransformConfig = new DataFrameTransformConfig("mega-transform",
new SourceConfig(new String[]{"source-data"}, queryConfig),
new DestConfig("pivot-dest"), pivotConfig, null);
DataFrameTransformConfig putTransformConfig = DataFrameTransformConfig.builder()
.setId("mega-transform")
.setSource(SourceConfig.builder()
.setIndex("source-data")
.setQuery(new MatchAllQueryBuilder())
.build())
.setDest(new DestConfig("pivot-dest"))
.setPivotConfig(pivotConfig)
.build();
RestHighLevelClient client = highLevelClient();
client.dataFrame().putDataFrameTransform(new PutDataFrameTransformRequest(putTransformConfig), RequestOptions.DEFAULT);