[7.x][Transform] add support for script in group_by (#53167) (#53324)

Adds the ability to base the group_by on the output of a script.

closes #43152
backport #53167
This commit is contained in:
Hendrik Muhs 2020-03-10 11:12:58 +01:00 committed by GitHub
parent 5c861cfe6e
commit 696aa4ddaf
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 817 additions and 246 deletions

View File

@ -20,12 +20,14 @@
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import java.io.IOException;
@ -48,23 +50,28 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
// From DateHistogramAggregationBuilder in core, transplanted and modified to a set
// so we don't need to import a dependency on the class
private static final Set<String> DATE_FIELD_UNITS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
"year",
"1y",
"quarter",
"1q",
"month",
"1M",
"week",
"1w",
"day",
"1d",
"hour",
"1h",
"minute",
"1m",
"second",
"1s")));
private static final Set<String> DATE_FIELD_UNITS = Collections.unmodifiableSet(
new HashSet<>(
Arrays.asList(
"year",
"1y",
"quarter",
"1q",
"month",
"1M",
"week",
"1w",
"day",
"1d",
"hour",
"1h",
"minute",
"1m",
"second",
"1s"
)
)
);
/**
* Interval can be specified in 2 ways:
@ -76,6 +83,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
*/
public interface Interval extends ToXContentFragment {
String getName();
DateHistogramInterval getInterval();
}
@ -131,8 +139,9 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
public CalendarInterval(DateHistogramInterval interval) {
this.interval = interval;
if (DATE_FIELD_UNITS.contains(interval.toString()) == false) {
throw new IllegalArgumentException("The supplied interval [" + interval + "] could not be parsed " +
"as a calendar interval.");
throw new IllegalArgumentException(
"The supplied interval [" + interval + "] could not be parsed " + "as a calendar interval."
);
}
}
@ -173,33 +182,35 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
}
}
private static final ConstructingObjectParser<DateHistogramGroupSource, Void> PARSER =
new ConstructingObjectParser<>("date_histogram_group_source",
true,
(args) -> {
String field = (String)args[0];
String fixedInterval = (String) args[1];
String calendarInterval = (String) args[2];
private static final ConstructingObjectParser<DateHistogramGroupSource, Void> PARSER = new ConstructingObjectParser<>(
"date_histogram_group_source",
true,
(args) -> {
String field = (String) args[0];
Script script = (Script) args[1];
String fixedInterval = (String) args[2];
String calendarInterval = (String) args[3];
ZoneId zoneId = (ZoneId) args[4];
Interval interval = null;
Interval interval = null;
if (fixedInterval != null && calendarInterval != null) {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found both");
} else if (fixedInterval != null) {
interval = new FixedInterval(new DateHistogramInterval(fixedInterval));
} else if (calendarInterval != null) {
interval = new CalendarInterval(new DateHistogramInterval(calendarInterval));
} else {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found none");
}
if (fixedInterval != null && calendarInterval != null) {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found both");
} else if (fixedInterval != null) {
interval = new FixedInterval(new DateHistogramInterval(fixedInterval));
} else if (calendarInterval != null) {
interval = new CalendarInterval(new DateHistogramInterval(calendarInterval));
} else {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found none");
}
ZoneId zoneId = (ZoneId) args[3];
return new DateHistogramGroupSource(field, interval, zoneId);
});
return new DateHistogramGroupSource(field, script, interval, zoneId);
}
);
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
Script.declareScript(PARSER, optionalConstructorArg(), SCRIPT);
PARSER.declareString(optionalConstructorArg(), new ParseField(FixedInterval.NAME));
PARSER.declareString(optionalConstructorArg(), new ParseField(CalendarInterval.NAME));
@ -219,8 +230,8 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
private final Interval interval;
private final ZoneId timeZone;
DateHistogramGroupSource(String field, Interval interval, ZoneId timeZone) {
super(field);
DateHistogramGroupSource(String field, Script script, Interval interval, ZoneId timeZone) {
super(field, script);
this.interval = interval;
this.timeZone = timeZone;
}
@ -241,9 +252,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
super.innerXContent(builder, params);
interval.toXContent(builder, params);
if (timeZone != null) {
builder.field(TIME_ZONE.getPreferredName(), timeZone.toString());
@ -264,9 +273,9 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
final DateHistogramGroupSource that = (DateHistogramGroupSource) other;
return Objects.equals(this.field, that.field) &&
Objects.equals(this.interval, that.interval) &&
Objects.equals(this.timeZone, that.timeZone);
return Objects.equals(this.field, that.field)
&& Objects.equals(this.interval, that.interval)
&& Objects.equals(this.timeZone, that.timeZone);
}
@Override
@ -274,6 +283,11 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
return Objects.hash(field, interval, timeZone);
}
@Override
public String toString() {
return Strings.toString(this, true, true);
}
public static Builder builder() {
return new Builder();
}
@ -281,6 +295,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
public static class Builder {
private String field;
private Script script;
private Interval interval;
private ZoneId timeZone;
@ -294,6 +309,16 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
return this;
}
/**
* The script with which to construct the date histogram grouping
* @param script The script
* @return The {@link Builder} with the script set.
*/
public Builder setScript(Script script) {
this.script = script;
return this;
}
/**
* Set the interval for the DateHistogram grouping
* @param interval a fixed or calendar interval
@ -315,7 +340,7 @@ public class DateHistogramGroupSource extends SingleGroupSource implements ToXCo
}
public DateHistogramGroupSource build() {
return new DateHistogramGroupSource(field, interval, timeZone);
return new DateHistogramGroupSource(field, script, interval, timeZone);
}
}
}

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import java.io.IOException;
import java.util.Objects;
@ -37,12 +38,15 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
public class HistogramGroupSource extends SingleGroupSource implements ToXContentObject {
protected static final ParseField INTERVAL = new ParseField("interval");
private static final ConstructingObjectParser<HistogramGroupSource, Void> PARSER =
new ConstructingObjectParser<>("histogram_group_source", true,
args -> new HistogramGroupSource((String) args[0], (double) args[1]));
private static final ConstructingObjectParser<HistogramGroupSource, Void> PARSER = new ConstructingObjectParser<>(
"histogram_group_source",
true,
args -> new HistogramGroupSource((String) args[0], (Script) args[1], (double) args[2])
);
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
Script.declareScript(PARSER, optionalConstructorArg(), SCRIPT);
PARSER.declareDouble(optionalConstructorArg(), INTERVAL);
}
@ -52,8 +56,8 @@ public class HistogramGroupSource extends SingleGroupSource implements ToXConten
private final double interval;
HistogramGroupSource(String field, double interval) {
super(field);
HistogramGroupSource(String field, Script script, double interval) {
super(field, script);
if (interval <= 0) {
throw new IllegalArgumentException("[interval] must be greater than 0.");
}
@ -72,9 +76,7 @@ public class HistogramGroupSource extends SingleGroupSource implements ToXConten
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
super.innerXContent(builder, params);
builder.field(INTERVAL.getPreferredName(), interval);
builder.endObject();
return builder;
@ -92,8 +94,7 @@ public class HistogramGroupSource extends SingleGroupSource implements ToXConten
final HistogramGroupSource that = (HistogramGroupSource) other;
return Objects.equals(this.field, that.field) &&
Objects.equals(this.interval, that.interval);
return Objects.equals(this.field, that.field) && Objects.equals(this.interval, that.interval);
}
@Override
@ -108,6 +109,7 @@ public class HistogramGroupSource extends SingleGroupSource implements ToXConten
public static class Builder {
private String field;
private Script script;
private double interval;
/**
@ -130,8 +132,18 @@ public class HistogramGroupSource extends SingleGroupSource implements ToXConten
return this;
}
/**
* The script with which to construct the histogram grouping
* @param script The script
* @return The {@link Builder} with the script set.
*/
public Builder setScript(Script script) {
this.script = script;
return this;
}
public HistogramGroupSource build() {
return new HistogramGroupSource(field, interval);
return new HistogramGroupSource(field, script, interval);
}
}
}

View File

@ -21,13 +21,17 @@ package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Script;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
public abstract class SingleGroupSource implements ToXContentObject {
protected static final ParseField FIELD = new ParseField("field");
protected static final ParseField SCRIPT = new ParseField("script");
public enum Type {
TERMS,
@ -40,9 +44,11 @@ public abstract class SingleGroupSource implements ToXContentObject {
}
protected final String field;
protected final Script script;
public SingleGroupSource(final String field) {
public SingleGroupSource(final String field, final Script script) {
this.field = field;
this.script = script;
}
public abstract Type getType();
@ -51,6 +57,19 @@ public abstract class SingleGroupSource implements ToXContentObject {
return field;
}
public Script getScript() {
return script;
}
protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
if (script != null) {
builder.field(SCRIPT.getPreferredName(), script);
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
@ -63,11 +82,11 @@ public abstract class SingleGroupSource implements ToXContentObject {
final SingleGroupSource that = (SingleGroupSource) other;
return Objects.equals(this.field, that.field);
return Objects.equals(this.field, that.field) && Objects.equals(this.script, that.script);
}
@Override
public int hashCode() {
return Objects.hash(field);
return Objects.hash(field, script);
}
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import java.io.IOException;
@ -31,19 +32,23 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
public class TermsGroupSource extends SingleGroupSource implements ToXContentObject {
private static final ConstructingObjectParser<TermsGroupSource, Void> PARSER =
new ConstructingObjectParser<>("terms_group_source", true, args -> new TermsGroupSource((String) args[0]));
private static final ConstructingObjectParser<TermsGroupSource, Void> PARSER = new ConstructingObjectParser<>(
"terms_group_source",
true,
args -> new TermsGroupSource((String) args[0], (Script) args[1])
);
static {
PARSER.declareString(optionalConstructorArg(), FIELD);
Script.declareScript(PARSER, optionalConstructorArg(), SCRIPT);
}
public static TermsGroupSource fromXContent(final XContentParser parser) {
return PARSER.apply(parser, null);
}
TermsGroupSource(final String field) {
super(field);
TermsGroupSource(final String field, final Script script) {
super(field, script);
}
@Override
@ -54,9 +59,7 @@ public class TermsGroupSource extends SingleGroupSource implements ToXContentObj
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
super.innerXContent(builder, params);
builder.endObject();
return builder;
}
@ -68,6 +71,7 @@ public class TermsGroupSource extends SingleGroupSource implements ToXContentObj
public static class Builder {
private String field;
private Script script;
/**
* The field with which to construct the date histogram grouping
@ -79,8 +83,18 @@ public class TermsGroupSource extends SingleGroupSource implements ToXContentObj
return this;
}
/**
* The script with which to construct the terms grouping
* @param script The script
* @return The {@link Builder} with the script set.
*/
public Builder setScript(Script script) {
this.script = script;
return this;
}
public TermsGroupSource build() {
return new TermsGroupSource(field);
return new TermsGroupSource(field, script);
}
}
}

View File

@ -20,10 +20,12 @@
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class DateHistogramGroupSourceTests extends AbstractXContentTestCase<DateHistogramGroupSource> {
@ -37,9 +39,9 @@ public class DateHistogramGroupSourceTests extends AbstractXContentTestCase<Date
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
return new DateHistogramGroupSource(field,
randomDateHistogramInterval(),
randomBoolean() ? randomZone() : null);
Script script = randomBoolean() ? new Script(randomAlphaOfLengthBetween(1, 10)) : null;
return new DateHistogramGroupSource(field, script, randomDateHistogramInterval(), randomBoolean() ? randomZone() : null);
}
@Override
@ -56,4 +58,10 @@ public class DateHistogramGroupSourceTests extends AbstractXContentTestCase<Date
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
}

View File

@ -20,16 +20,19 @@
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class HistogramGroupSourceTests extends AbstractXContentTestCase<HistogramGroupSource> {
public static HistogramGroupSource randomHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
Script script = randomBoolean() ? new Script(randomAlphaOfLengthBetween(1, 10)) : null;
double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false);
return new HistogramGroupSource(field, interval);
return new HistogramGroupSource(field, script, interval);
}
@Override
@ -46,4 +49,10 @@ public class HistogramGroupSourceTests extends AbstractXContentTestCase<Histogra
protected HistogramGroupSource createTestInstance() {
return randomHistogramGroupSource();
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
}

View File

@ -20,14 +20,17 @@
package org.elasticsearch.client.transform.transforms.pivot;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.Script;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.function.Predicate;
public class TermsGroupSourceTests extends AbstractXContentTestCase<TermsGroupSource> {
public static TermsGroupSource randomTermsGroupSource() {
return new TermsGroupSource(randomAlphaOfLengthBetween(1, 20));
Script script = randomBoolean() ? new Script(randomAlphaOfLengthBetween(1, 10)) : null;
return new TermsGroupSource(randomAlphaOfLengthBetween(1, 20), script);
}
@Override
@ -44,4 +47,10 @@ public class TermsGroupSourceTests extends AbstractXContentTestCase<TermsGroupSo
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// allow unknown fields in the root of the object only
return field -> !field.isEmpty();
}
}

View File

@ -20,26 +20,68 @@
package org.elasticsearch.client.transform.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource;
import org.elasticsearch.xpack.core.transform.transforms.pivot.ScriptConfig;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.hamcrest.Matchers.equalTo;
public class DateHistogramGroupSourceTests extends AbstractResponseTestCase<
DateHistogramGroupSource,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource> {
DateHistogramGroupSource,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource> {
/**
 * Builds a random server-side {@link ScriptConfig} for HLRC round-trip tests.
 *
 * The raw source map is produced by serializing the {@link Script} to JSON and
 * parsing it back into a map, mirroring how the server retains the lenient
 * original source alongside the parsed script.
 *
 * Fix: removed the leftover {@code type = ScriptType.STORED;} dead-store that
 * unconditionally overwrote the randomly chosen type, which defeated the
 * {@code randomFrom(ScriptType.values())} call and made the random
 * {@code lang} unreachable (lang is only used when the type is not STORED).
 *
 * @return a randomized script configuration
 */
public static ScriptConfig randomScriptConfig() {
    ScriptType type = randomFrom(ScriptType.values());
    // lang must be null for stored scripts; only non-stored scripts carry a language
    String lang = randomBoolean() ? Script.DEFAULT_SCRIPT_LANG : randomAlphaOfLengthBetween(1, 20);
    String idOrCode = randomAlphaOfLengthBetween(1, 20);
    Map<String, Object> params = Collections.emptyMap();
    Script script = new Script(type, type == ScriptType.STORED ? null : lang, idOrCode, params);
    LinkedHashMap<String, Object> source = null;
    try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
        XContentBuilder content = script.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
        source = (LinkedHashMap<String, Object>) XContentHelper.convertToMap(BytesReference.bytes(content), true, XContentType.JSON)
            .v2();
    } catch (IOException e) {
        // should not happen: building JSON in memory cannot fail with IO
        fail("failed to create random script config");
    }
    return new ScriptConfig(source, script);
}
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
DateHistogramGroupSource dateHistogramGroupSource; // = new DateHistogramGroupSource(field);
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : randomScriptConfig();
DateHistogramGroupSource dateHistogramGroupSource;
if (randomBoolean()) {
dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.FixedInterval(
new DateHistogramInterval(randomPositiveTimeValue())));
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval(randomPositiveTimeValue()))
);
} else {
dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.CalendarInterval(
new DateHistogramInterval(randomTimeValue(1,1, "m", "h", "d", "w"))));
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w")))
);
}
if (randomBoolean()) {
@ -59,16 +101,25 @@ public class DateHistogramGroupSourceTests extends AbstractResponseTestCase<
}
@Override
protected void assertInstances(DateHistogramGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource clientInstance) {
protected void assertInstances(
DateHistogramGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource clientInstance
) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
if (serverTestInstance.getScriptConfig() != null) {
assertThat(serverTestInstance.getScriptConfig().getScript(), equalTo(clientInstance.getScript()));
} else {
assertNull(clientInstance.getScript());
}
assertSameInterval(serverTestInstance.getInterval(), clientInstance.getInterval());
assertThat(serverTestInstance.getTimeZone(), equalTo(clientInstance.getTimeZone()));
assertThat(serverTestInstance.getType().name(), equalTo(clientInstance.getType().name()));
}
private void assertSameInterval(DateHistogramGroupSource.Interval serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource.Interval clientInstance) {
private void assertSameInterval(
DateHistogramGroupSource.Interval serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.DateHistogramGroupSource.Interval clientInstance
) {
assertEquals(serverTestInstance.getName(), clientInstance.getName());
assertEquals(serverTestInstance.getInterval(), clientInstance.getInterval());
}

View File

@ -0,0 +1,69 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.pivot.HistogramGroupSource;
import org.elasticsearch.xpack.core.transform.transforms.pivot.ScriptConfig;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
/**
 * Round-trip test: serializes a server-side {@code HistogramGroupSource} to
 * XContent and asserts the HLRC client parses back an equivalent instance.
 */
public class HistogramGroupSourceTests extends AbstractResponseTestCase<
HistogramGroupSource,
org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource> {
/**
 * Creates a random server-side histogram group source.
 * Field and script are each independently nullable so that the
 * field-only, script-only, and field+script combinations are all exercised.
 */
public static HistogramGroupSource randomHistogramGroupSource() {
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : DateHistogramGroupSourceTests.randomScriptConfig();
// interval must be > 0; Math.nextUp(0) is the smallest positive double
double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false);
return new HistogramGroupSource(field, scriptConfig, interval);
}
@Override
protected HistogramGroupSource createServerTestInstance(XContentType xContentType) {
return randomHistogramGroupSource();
}
@Override
protected org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource doParseToClientInstance(XContentParser parser)
throws IOException {
return org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource.fromXContent(parser);
}
/**
 * Compares server and client instances field by field. The client holds a
 * plain {@code Script}, so it is compared against the script wrapped inside
 * the server's {@code ScriptConfig} (or asserted null when absent).
 */
@Override
protected void assertInstances(
HistogramGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.HistogramGroupSource clientInstance
) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
if (serverTestInstance.getScriptConfig() != null) {
assertThat(serverTestInstance.getScriptConfig().getScript(), equalTo(clientInstance.getScript()));
} else {
assertNull(clientInstance.getScript());
}
assertThat(serverTestInstance.getInterval(), equalTo(clientInstance.getInterval()));
}
}

View File

@ -0,0 +1,67 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.transform.transforms.pivot.hlrc;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.core.transform.transforms.pivot.ScriptConfig;
import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSource;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
/**
 * Round-trip test: serializes a server-side {@code TermsGroupSource} to
 * XContent and asserts the HLRC client parses back an equivalent instance.
 */
public class TermsGroupSourceTests extends AbstractResponseTestCase<
TermsGroupSource,
org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource> {
/**
 * Creates a random server-side terms group source.
 * Field and script are each independently nullable so that the
 * field-only, script-only, and field+script combinations are all exercised.
 */
public static TermsGroupSource randomTermsGroupSource() {
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : DateHistogramGroupSourceTests.randomScriptConfig();
return new TermsGroupSource(field, scriptConfig);
}
@Override
protected TermsGroupSource createServerTestInstance(XContentType xContentType) {
return randomTermsGroupSource();
}
@Override
protected org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource doParseToClientInstance(XContentParser parser)
throws IOException {
return org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource.fromXContent(parser);
}
/**
 * Compares server and client instances. The client holds a plain
 * {@code Script}, so it is compared against the script wrapped inside the
 * server's {@code ScriptConfig} (or asserted null when absent).
 */
@Override
protected void assertInstances(
TermsGroupSource serverTestInstance,
org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource clientInstance
) {
assertThat(serverTestInstance.getField(), equalTo(clientInstance.getField()));
if (serverTestInstance.getScriptConfig() != null) {
assertThat(serverTestInstance.getScriptConfig().getScript(), equalTo(clientInstance.getScript()));
} else {
assertNull(clientInstance.getScript());
}
}
}

View File

@ -56,6 +56,7 @@ public class TransformMessages {
public static final String TRANSFORM_UPDATE_CANNOT_CHANGE_SYNC_METHOD =
"Cannot change the current sync configuration of transform [{0}] from [{1}] to [{2}]";
public static final String LOG_TRANSFORM_CONFIGURATION_BAD_QUERY = "Failed to parse query for transform";
public static final String LOG_TRANSFORM_CONFIGURATION_BAD_SCRIPT = "Failed to parse script for transform";
public static final String LOG_TRANSFORM_CONFIGURATION_BAD_GROUP_BY = "Failed to parse group_by for pivot transform";
public static final String LOG_TRANSFORM_CONFIGURATION_BAD_AGGREGATION = "Failed to parse aggregation for pivot transform";
public static final String LOG_TRANSFORM_PIVOT_REDUCE_PAGE_SIZE =

View File

@ -27,7 +27,6 @@ import java.util.Set;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class DateHistogramGroupSource extends SingleGroupSource {
private static final int CALENDAR_INTERVAL_ID = 1;
@ -43,7 +42,9 @@ public class DateHistogramGroupSource extends SingleGroupSource {
*/
public interface Interval extends Writeable, ToXContentFragment {
String getName();
DateHistogramInterval getInterval();
byte getIntervalTypeId();
}
@ -113,8 +114,9 @@ public class DateHistogramGroupSource extends SingleGroupSource {
public CalendarInterval(DateHistogramInterval interval) {
this.interval = interval;
if (DateHistogramAggregationBuilder.DATE_FIELD_UNITS.get(interval.toString()) == null) {
throw new IllegalArgumentException("The supplied interval [" + interval + "] could not be parsed " +
"as a calendar interval.");
throw new IllegalArgumentException(
"The supplied interval [" + interval + "] could not be parsed " + "as a calendar interval."
);
}
}
@ -172,12 +174,12 @@ public class DateHistogramGroupSource extends SingleGroupSource {
private Interval readInterval(StreamInput in) throws IOException {
byte id = in.readByte();
switch (id) {
case FIXED_INTERVAL_ID:
return new FixedInterval(in);
case CALENDAR_INTERVAL_ID:
return new CalendarInterval(in);
default:
throw new IllegalArgumentException("unknown interval type [" + id + "]");
case FIXED_INTERVAL_ID:
return new FixedInterval(in);
case CALENDAR_INTERVAL_ID:
return new CalendarInterval(in);
default:
throw new IllegalArgumentException("unknown interval type [" + id + "]");
}
}
@ -195,8 +197,8 @@ public class DateHistogramGroupSource extends SingleGroupSource {
private final Interval interval;
private ZoneId timeZone;
public DateHistogramGroupSource(String field, Interval interval) {
super(field);
public DateHistogramGroupSource(String field, ScriptConfig scriptConfig, Interval interval) {
super(field, scriptConfig);
this.interval = interval;
}
@ -213,8 +215,9 @@ public class DateHistogramGroupSource extends SingleGroupSource {
private static ConstructingObjectParser<DateHistogramGroupSource, Void> createParser(boolean lenient) {
ConstructingObjectParser<DateHistogramGroupSource, Void> parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> {
String field = (String) args[0];
String fixedInterval = (String) args[1];
String calendarInterval = (String) args[2];
ScriptConfig scriptConfig = (ScriptConfig) args[1];
String fixedInterval = (String) args[2];
String calendarInterval = (String) args[3];
Interval interval = null;
@ -228,10 +231,10 @@ public class DateHistogramGroupSource extends SingleGroupSource {
throw new IllegalArgumentException("You must specify either fixed_interval or calendar_interval, found none");
}
return new DateHistogramGroupSource(field, interval);
return new DateHistogramGroupSource(field, scriptConfig, interval);
});
declareValuesSourceFields(parser);
declareValuesSourceFields(parser, lenient);
parser.declareString(optionalConstructorArg(), new ParseField(FixedInterval.NAME));
parser.declareString(optionalConstructorArg(), new ParseField(CalendarInterval.NAME));
@ -270,7 +273,7 @@ public class DateHistogramGroupSource extends SingleGroupSource {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(field);
super.writeTo(out);
writeInterval(interval, out);
out.writeOptionalZoneId(timeZone);
// Format was optional in 7.2.x, removed in 7.3+
@ -282,9 +285,7 @@ public class DateHistogramGroupSource extends SingleGroupSource {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
super.innerXContent(builder, params);
interval.toXContent(builder, params);
if (timeZone != null) {
builder.field(TIME_ZONE.getPreferredName(), timeZone.toString());
@ -305,9 +306,7 @@ public class DateHistogramGroupSource extends SingleGroupSource {
final DateHistogramGroupSource that = (DateHistogramGroupSource) other;
return Objects.equals(this.field, that.field) &&
Objects.equals(interval, that.interval) &&
Objects.equals(timeZone, that.timeZone);
return Objects.equals(this.field, that.field) && Objects.equals(interval, that.interval) && Objects.equals(timeZone, that.timeZone);
}
@Override

View File

@ -27,8 +27,8 @@ public class HistogramGroupSource extends SingleGroupSource {
private static final ConstructingObjectParser<HistogramGroupSource, Void> LENIENT_PARSER = createParser(true);
private final double interval;
public HistogramGroupSource(String field, double interval) {
super(field);
public HistogramGroupSource(String field, ScriptConfig scriptConfig, double interval) {
super(field, scriptConfig);
if (interval <= 0) {
throw new IllegalArgumentException("[interval] must be greater than 0.");
}
@ -43,10 +43,11 @@ public class HistogramGroupSource extends SingleGroupSource {
private static ConstructingObjectParser<HistogramGroupSource, Void> createParser(boolean lenient) {
ConstructingObjectParser<HistogramGroupSource, Void> parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> {
String field = (String) args[0];
double interval = (double) args[1];
return new HistogramGroupSource(field, interval);
ScriptConfig scriptConfig = (ScriptConfig) args[1];
double interval = (double) args[2];
return new HistogramGroupSource(field, scriptConfig, interval);
});
declareValuesSourceFields(parser);
declareValuesSourceFields(parser, lenient);
parser.declareDouble(optionalConstructorArg(), INTERVAL);
return parser;
}
@ -62,7 +63,7 @@ public class HistogramGroupSource extends SingleGroupSource {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(field);
super.writeTo(out);
out.writeDouble(interval);
}
@ -73,9 +74,7 @@ public class HistogramGroupSource extends SingleGroupSource {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
super.innerXContent(builder, params);
builder.field(INTERVAL.getPreferredName(), interval);
builder.endObject();
return builder;
@ -93,8 +92,7 @@ public class HistogramGroupSource extends SingleGroupSource {
final HistogramGroupSource that = (HistogramGroupSource) other;
return Objects.equals(this.field, that.field) &&
Objects.equals(this.interval, that.interval);
return Objects.equals(this.field, that.field) && Objects.equals(this.interval, that.interval);
}
@Override

View File

@ -0,0 +1,112 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.transform.transforms.pivot;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.script.Script;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
/**
 * Configuration holder for a {@link Script} used by a transform group_by source.
 *
 * The script is kept in two representations:
 * the raw, unparsed map exactly as supplied by the user ({@code source}, always written back
 * out verbatim by {@link #toXContent}), and the parsed {@link Script} ({@code script}), which
 * is {@code null} when lenient parsing swallowed a parse failure (see {@link #isValid()}).
 */
public class ScriptConfig extends AbstractDiffable<ScriptConfig> implements Writeable, ToXContentObject {

    private static final Logger logger = LogManager.getLogger(ScriptConfig.class);

    // we store the script in 2 formats: the raw format and the parsed format
    private final Map<String, Object> source;
    private final Script script;

    public ScriptConfig(final Map<String, Object> source, Script script) {
        this.source = source;
        this.script = script;
    }

    public ScriptConfig(final StreamInput in) throws IOException {
        source = in.readMap();
        // script is optional on the wire: it is null when the config was stored after a lenient
        // parse of broken script source
        script = in.readOptionalWriteable(Script::new);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // emit the raw user-supplied source, not the parsed script, so round-tripping preserves
        // the original document byte-for-byte
        return builder.map(source);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeMap(source);
        out.writeOptionalWriteable(script);
    }

    public Script getScript() {
        return script;
    }

    /**
     * Parse a script configuration.
     *
     * Parsing happens in two passes, because an {@link XContentParser} cannot be cloned or
     * rewound: the content is first read into an ordered map (kept verbatim as the raw source)
     * and that map is then re-serialized and re-parsed purely for syntax checking.
     *
     * @param parser  parser positioned at the script object
     * @param lenient if true, a script that fails {@link Script#parse} is logged and kept with a
     *                null parsed script (the config reports {@code isValid() == false}); if false
     *                the parse exception propagates
     * @return the parsed config, never null
     */
    public static ScriptConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException {
        // we need 2 passes, but the parser can not be cloned, so we parse 1st into a map and then re-parse that for syntax checking

        // remember the registry, needed for the 2nd pass
        NamedXContentRegistry registry = parser.getXContentRegistry();
        Map<String, Object> source = parser.mapOrdered();
        Script script = null;

        try (
            XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source);
            XContentParser sourceParser = XContentType.JSON.xContent()
                .createParser(registry, LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(xContentBuilder).streamInput())
        ) {
            script = Script.parse(sourceParser);
        } catch (Exception e) {
            if (lenient) {
                // keep the raw source but mark the config invalid by leaving script == null
                logger.warn(TransformMessages.LOG_TRANSFORM_CONFIGURATION_BAD_SCRIPT, e);
            } else {
                throw e;
            }
        }

        return new ScriptConfig(source, script);
    }

    @Override
    public int hashCode() {
        return Objects.hash(source, script);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }

        if (other == null || getClass() != other.getClass()) {
            return false;
        }

        final ScriptConfig that = (ScriptConfig) other;

        return Objects.equals(this.source, that.source) && Objects.equals(this.script, that.script);
    }

    /** @return true if the raw source could be parsed into a {@link Script} */
    public boolean isValid() {
        return this.script != null;
    }
}

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core.transform.transforms.pivot;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
@ -45,14 +46,14 @@ public abstract class SingleGroupSource implements Writeable, ToXContentObject {
public static Type fromId(byte id) {
switch (id) {
case 0:
return TERMS;
case 1:
return HISTOGRAM;
case 2:
return DATE_HISTOGRAM;
default:
throw new IllegalArgumentException("unknown type");
case 0:
return TERMS;
case 1:
return HISTOGRAM;
case 2:
return DATE_HISTOGRAM;
default:
throw new IllegalArgumentException("unknown type");
}
}
@ -62,36 +63,53 @@ public abstract class SingleGroupSource implements Writeable, ToXContentObject {
}
protected static final ParseField FIELD = new ParseField("field");
protected static final ParseField SCRIPT = new ParseField("script");
// TODO: add script
protected final String field;
protected final ScriptConfig scriptConfig;
static <T> void declareValuesSourceFields(AbstractObjectParser<? extends SingleGroupSource, T> parser) {
// either script or field
static <T> void declareValuesSourceFields(AbstractObjectParser<? extends SingleGroupSource, T> parser, boolean lenient) {
parser.declareString(optionalConstructorArg(), FIELD);
parser.declareObject(optionalConstructorArg(), (p, c) -> ScriptConfig.fromXContent(p, lenient), SCRIPT);
}
public SingleGroupSource(final String field) {
public SingleGroupSource(final String field, final ScriptConfig scriptConfig) {
this.field = field;
this.scriptConfig = scriptConfig;
}
public SingleGroupSource(StreamInput in) throws IOException {
field = in.readOptionalString();
if (in.getVersion().onOrAfter(Version.V_7_7_0)) {
scriptConfig = in.readOptionalWriteable(ScriptConfig::new);
} else {
scriptConfig = null;
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerXContent(builder, params);
builder.endObject();
return builder;
}
protected void innerXContent(XContentBuilder builder, Params params) throws IOException {
if (field != null) {
builder.field(FIELD.getPreferredName(), field);
}
builder.endObject();
return builder;
if (scriptConfig != null) {
builder.field(SCRIPT.getPreferredName(), scriptConfig);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(field);
if (out.getVersion().onOrAfter(Version.V_7_7_0)) {
out.writeOptionalWriteable(scriptConfig);
}
}
public abstract Type getType();
@ -104,6 +122,10 @@ public abstract class SingleGroupSource implements Writeable, ToXContentObject {
return field;
}
public ScriptConfig getScriptConfig() {
return scriptConfig;
}
@Override
public boolean equals(Object other) {
if (this == other) {
@ -116,12 +138,12 @@ public abstract class SingleGroupSource implements Writeable, ToXContentObject {
final SingleGroupSource that = (SingleGroupSource) other;
return Objects.equals(this.field, that.field);
return Objects.equals(this.field, that.field) && Objects.equals(this.scriptConfig, that.scriptConfig);
}
@Override
public int hashCode() {
return Objects.hash(field);
return Objects.hash(field, scriptConfig);
}
@Override

View File

@ -27,15 +27,17 @@ public class TermsGroupSource extends SingleGroupSource {
private static ConstructingObjectParser<TermsGroupSource, Void> createParser(boolean lenient) {
ConstructingObjectParser<TermsGroupSource, Void> parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> {
String field = (String) args[0];
return new TermsGroupSource(field);
ScriptConfig scriptConfig = (ScriptConfig) args[1];
return new TermsGroupSource(field, scriptConfig);
});
SingleGroupSource.declareValuesSourceFields(parser);
SingleGroupSource.declareValuesSourceFields(parser, lenient);
return parser;
}
public TermsGroupSource(final String field) {
super(field);
public TermsGroupSource(final String field, final ScriptConfig scriptConfig) {
super(field, scriptConfig);
}
public TermsGroupSource(StreamInput in) throws IOException {

View File

@ -19,14 +19,21 @@ import java.io.IOException;
public class DateHistogramGroupSourceTests extends AbstractSerializingTestCase<DateHistogramGroupSource> {
public static DateHistogramGroupSource randomDateHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : ScriptConfigTests.randomScriptConfig();
DateHistogramGroupSource dateHistogramGroupSource;
if (randomBoolean()) {
dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.FixedInterval(
new DateHistogramInterval(randomPositiveTimeValue())));
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
new DateHistogramGroupSource.FixedInterval(new DateHistogramInterval(randomPositiveTimeValue()))
);
} else {
dateHistogramGroupSource = new DateHistogramGroupSource(field, new DateHistogramGroupSource.CalendarInterval(
new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w"))));
dateHistogramGroupSource = new DateHistogramGroupSource(
field,
scriptConfig,
new DateHistogramGroupSource.CalendarInterval(new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w")))
);
}
if (randomBoolean()) {

View File

@ -15,9 +15,11 @@ import java.io.IOException;
public class HistogramGroupSourceTests extends AbstractSerializingTestCase<HistogramGroupSource> {
public static HistogramGroupSource randomHistogramGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : ScriptConfigTests.randomScriptConfig();
double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false);
return new HistogramGroupSource(field, interval);
return new HistogramGroupSource(field, scriptConfig, interval);
}
@Override

View File

@ -0,0 +1,100 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.transform.transforms.pivot;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.xpack.core.transform.transforms.AbstractSerializingTransformTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
public class ScriptConfigTests extends AbstractSerializingTransformTestCase<ScriptConfig> {
private boolean lenient;
public static ScriptConfig randomScriptConfig() {
ScriptType type = randomFrom(ScriptType.values());
String lang = randomBoolean() ? Script.DEFAULT_SCRIPT_LANG : randomAlphaOfLengthBetween(1, 20);
String idOrCode = randomAlphaOfLengthBetween(1, 20);
Map<String, Object> params = Collections.emptyMap();
type = ScriptType.STORED;
Script script = new Script(type, type == ScriptType.STORED ? null : lang, idOrCode, params);
LinkedHashMap<String, Object> source = null;
try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
XContentBuilder content = script.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
source = (LinkedHashMap<String, Object>) XContentHelper.convertToMap(BytesReference.bytes(content), true, XContentType.JSON)
.v2();
} catch (IOException e) {
// should not happen
fail("failed to create random script config");
}
return new ScriptConfig(source, script);
}
public static ScriptConfig randomInvalidScriptConfig() {
// create something broken but with a source
LinkedHashMap<String, Object> source = new LinkedHashMap<>();
for (String key : randomUnique(() -> randomAlphaOfLengthBetween(1, 20), randomIntBetween(1, 10))) {
source.put(key, randomAlphaOfLengthBetween(1, 20));
}
return new ScriptConfig(source, null);
}
@Before
public void setRandomFeatures() {
lenient = randomBoolean();
}
@Override
protected ScriptConfig doParseInstance(XContentParser parser) throws IOException {
return ScriptConfig.fromXContent(parser, lenient);
}
@Override
protected Reader<ScriptConfig> instanceReader() {
return ScriptConfig::new;
}
@Override
protected ScriptConfig createTestInstance() {
return lenient ? randomBoolean() ? randomScriptConfig() : randomInvalidScriptConfig() : randomScriptConfig();
}
public void testFailOnStrictPassOnLenient() throws IOException {
// use a wrong syntax to trigger a parsing exception for strict parsing
String source = "{\n" + " \"source-code\": \"a=b\"" + " }";
// lenient, passes but reports invalid
try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
ScriptConfig scriptConfig = ScriptConfig.fromXContent(parser, true);
assertFalse(scriptConfig.isValid());
}
// strict throws
try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
expectThrows(XContentParseException.class, () -> ScriptConfig.fromXContent(parser, false));
}
}
}

View File

@ -15,9 +15,10 @@ import java.io.IOException;
public class TermsGroupSourceTests extends AbstractSerializingTestCase<TermsGroupSource> {
public static TermsGroupSource randomTermsGroupSource() {
String field = randomAlphaOfLengthBetween(1, 20);
String field = randomBoolean() ? null : randomAlphaOfLengthBetween(1, 20);
ScriptConfig scriptConfig = randomBoolean() ? null : ScriptConfigTests.randomScriptConfig();
return new TermsGroupSource(field);
return new TermsGroupSource(field, scriptConfig);
}
@Override

View File

@ -16,11 +16,11 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.client.transform.transforms.TransformConfig;
import org.elasticsearch.client.transform.transforms.TransformConfigUpdate;
import org.elasticsearch.client.transform.transforms.TransformStats;
import org.elasticsearch.client.transform.transforms.DestConfig;
import org.elasticsearch.client.transform.transforms.TimeSyncConfig;
import org.elasticsearch.client.transform.transforms.pivot.SingleGroupSource;
import org.elasticsearch.client.transform.transforms.pivot.TermsGroupSource;
import org.elasticsearch.common.bytes.BytesReference;
@ -65,11 +65,7 @@ public class TransformIT extends TransformIntegTestCase {
.addAggregator(AggregationBuilders.avg("review_score").field("stars"))
.addAggregator(AggregationBuilders.max("timestamp").field("timestamp"));
TransformConfig config = createTransformConfig("transform-crud",
groups,
aggs,
"reviews-by-user-business-day",
indexName);
TransformConfig config = createTransformConfig("transform-crud", groups, aggs, "reviews-by-user-business-day", indexName);
assertTrue(putTransform(config, RequestOptions.DEFAULT).isAcknowledged());
assertTrue(startTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged());
@ -98,27 +94,22 @@ public class TransformIT extends TransformIntegTestCase {
.addAggregator(AggregationBuilders.avg("review_score").field("stars"))
.addAggregator(AggregationBuilders.max("timestamp").field("timestamp"));
TransformConfig config = createTransformConfigBuilder("transform-crud",
TransformConfig config = createTransformConfigBuilder(
"transform-crud",
groups,
aggs,
"reviews-by-user-business-day",
QueryBuilders.matchAllQuery(),
indexName)
.setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1)))
.build();
indexName
).setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1))).build();
assertTrue(putTransform(config, RequestOptions.DEFAULT).isAcknowledged());
assertTrue(startTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged());
waitUntilCheckpoint(config.getId(), 1L);
assertThat(getTransformStats(config.getId()).getTransformsStats().get(0).getState(),
equalTo(TransformStats.State.STARTED));
assertThat(getTransformStats(config.getId()).getTransformsStats().get(0).getState(), equalTo(TransformStats.State.STARTED));
long docsIndexed = getTransformStats(config.getId())
.getTransformsStats()
.get(0)
.getIndexerStats()
.getNumDocuments();
long docsIndexed = getTransformStats(config.getId()).getTransformsStats().get(0).getIndexerStats().getNumDocuments();
TransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0);
assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT));
@ -132,11 +123,10 @@ public class TransformIT extends TransformIntegTestCase {
waitUntilCheckpoint(config.getId(), 2L);
// Assert that we wrote the new docs
assertThat(getTransformStats(config.getId())
.getTransformsStats()
.get(0)
.getIndexerStats()
.getNumDocuments(), greaterThan(docsIndexed));
assertThat(
getTransformStats(config.getId()).getTransformsStats().get(0).getIndexerStats().getNumDocuments(),
greaterThan(docsIndexed)
);
stopTransform(config.getId());
deleteTransform(config.getId());
@ -155,12 +145,7 @@ public class TransformIT extends TransformIntegTestCase {
String id = "transform-to-update";
String dest = "reviews-by-user-business-day-to-update";
TransformConfig config = createTransformConfigBuilder(id,
groups,
aggs,
dest,
QueryBuilders.matchAllQuery(),
indexName)
TransformConfig config = createTransformConfigBuilder(id, groups, aggs, dest, QueryBuilders.matchAllQuery(), indexName)
.setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1)))
.build();
@ -168,14 +153,12 @@ public class TransformIT extends TransformIntegTestCase {
assertTrue(startTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged());
waitUntilCheckpoint(config.getId(), 1L);
assertThat(getTransformStats(config.getId()).getTransformsStats().get(0).getState(),
oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING));
assertThat(
getTransformStats(config.getId()).getTransformsStats().get(0).getState(),
oneOf(TransformStats.State.STARTED, TransformStats.State.INDEXING)
);
long docsIndexed = getTransformStats(config.getId())
.getTransformsStats()
.get(0)
.getIndexerStats()
.getNumDocuments();
long docsIndexed = getTransformStats(config.getId()).getTransformsStats().get(0).getIndexerStats().getNumDocuments();
TransformConfig storedConfig = getTransform(config.getId()).getTransformConfigurations().get(0);
assertThat(storedConfig.getVersion(), equalTo(Version.CURRENT));
@ -189,8 +172,7 @@ public class TransformIT extends TransformIntegTestCase {
.build();
RestHighLevelClient hlrc = new TestRestHighLevelClient();
final XContentBuilder pipelineBuilder = jsonBuilder()
.startObject()
final XContentBuilder pipelineBuilder = jsonBuilder().startObject()
.startArray("processors")
.startObject()
.startObject("set")
@ -200,8 +182,11 @@ public class TransformIT extends TransformIntegTestCase {
.endObject()
.endArray()
.endObject();
hlrc.ingest().putPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(pipelineBuilder), XContentType.JSON),
RequestOptions.DEFAULT);
hlrc.ingest()
.putPipeline(
new PutPipelineRequest(pipelineId, BytesReference.bytes(pipelineBuilder), XContentType.JSON),
RequestOptions.DEFAULT
);
updateConfig(id, update);
@ -212,18 +197,13 @@ public class TransformIT extends TransformIntegTestCase {
// Since updates are loaded on checkpoint start, we should see the updated config on this next run
waitUntilCheckpoint(config.getId(), 2L);
long numDocsAfterCp2 = getTransformStats(config.getId())
.getTransformsStats()
.get(0)
.getIndexerStats()
.getNumDocuments();
long numDocsAfterCp2 = getTransformStats(config.getId()).getTransformsStats().get(0).getIndexerStats().getNumDocuments();
assertThat(numDocsAfterCp2, greaterThan(docsIndexed));
final SearchRequest searchRequest = new SearchRequest(dest)
.source(new SearchSourceBuilder()
.trackTotalHits(true)
.query(QueryBuilders.boolQuery()
.filter(QueryBuilders.termQuery("static_forty_two", 42))));
final SearchRequest searchRequest = new SearchRequest(dest).source(
new SearchSourceBuilder().trackTotalHits(true)
.query(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("static_forty_two", 42)))
);
// assert that we have the new field and its value is 42 in at least some docs
assertBusy(() -> {
final SearchResponse searchResponse = hlrc.search(searchRequest, RequestOptions.DEFAULT);
@ -249,14 +229,14 @@ public class TransformIT extends TransformIntegTestCase {
.addAggregator(AggregationBuilders.avg("review_score").field("stars"))
.addAggregator(AggregationBuilders.max("timestamp").field("timestamp"));
TransformConfig config = createTransformConfigBuilder(transformId,
TransformConfig config = createTransformConfigBuilder(
transformId,
groups,
aggs,
"reviews-by-user-business-day",
QueryBuilders.matchAllQuery(),
indexName)
.setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1)))
.build();
indexName
).setSyncConfig(new TimeSyncConfig("timestamp", TimeValue.timeValueSeconds(1))).build();
assertTrue(putTransform(config, RequestOptions.DEFAULT).isAcknowledged());
assertTrue(startTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged());

View File

@ -104,6 +104,58 @@ public class TransformPivotRestIT extends TransformRestTestCase {
assertOnePivotValue(transformIndex + "/_search?q=reviewer:user_26", 3.918918918);
}
/**
 * Pivot transform whose group_by key is produced by a script instead of a plain field:
 * the terms group source uses a painless-style script that prefixes the user id.
 */
public void testSimplePivotWithScript() throws Exception {
    String transformId = "simple-pivot-script";
    String transformIndex = "pivot_reviews_script";
    // grant the restricted data-access role read/write on source and destination index
    setupDataAccessRole(DATA_ACCESS_ROLE, REVIEWS_INDEX_NAME, transformIndex);
    final Request createTransformRequest = createRequestWithAuth(
        "PUT",
        getTransformEndpoint() + transformId,
        BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS
    );

    // same pivot as testSimplePivot, but we retrieve the grouping key using a script and add prefix
    String config = "{"
        + " \"dest\": {\"index\":\""
        + transformIndex
        + "\"},"
        + " \"source\": {\"index\":\""
        + REVIEWS_INDEX_NAME
        + "\"},"
        + " \"pivot\": {"
        + "   \"group_by\": {"
        + "     \"reviewer\": {"
        + "       \"terms\": {"
        + "         \"script\": {"
        + "           \"source\": \"'reviewer_' + doc['user_id'].value\""
        + " } } } },"
        + "   \"aggregations\": {"
        + "     \"avg_rating\": {"
        + "       \"avg\": {"
        + "         \"field\": \"stars\""
        + " } } } },"
        + "\"frequency\":\"1s\""
        + "}";

    createTransformRequest.setJsonEntity(config);
    Map<String, Object> createTransformResponse = entityAsMap(client().performRequest(createTransformRequest));
    assertThat(createTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE));

    startAndWaitForTransform(transformId, transformIndex, BASIC_AUTH_VALUE_TRANSFORM_ADMIN_WITH_SOME_DATA_ACCESS);

    // we expect 27 documents as there shall be 27 user_id's
    Map<String, Object> indexStats = getAsMap(transformIndex + "/_stats");
    assertEquals(27, XContentMapValues.extractValue("_all.total.docs.count", indexStats));

    // get and check some users; averages match testSimplePivot, keys carry the scripted prefix
    assertOnePivotValue(transformIndex + "/_search?q=reviewer:reviewer_user_0", 3.776978417);
    assertOnePivotValue(transformIndex + "/_search?q=reviewer:reviewer_user_5", 3.72);
    assertOnePivotValue(transformIndex + "/_search?q=reviewer:reviewer_user_11", 3.846153846);
    assertOnePivotValue(transformIndex + "/_search?q=reviewer:reviewer_user_20", 3.769230769);
    assertOnePivotValue(transformIndex + "/_search?q=reviewer:reviewer_user_26", 3.918918918);
}
public void testPivotWithPipeline() throws Exception {
String transformId = "simple_pivot_with_pipeline";
String transformIndex = "pivot_with_pipeline";

View File

@ -23,11 +23,11 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.DestConfig;
import org.elasticsearch.xpack.core.transform.transforms.QueryConfig;
import org.elasticsearch.xpack.core.transform.transforms.SourceConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformProgress;
import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig;
import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig;
import org.elasticsearch.xpack.core.transform.transforms.pivot.HistogramGroupSource;
@ -123,7 +123,7 @@ public class TransformProgressIT extends ESRestTestCase {
DestConfig destConfig = new DestConfig("unnecessary", null);
GroupConfig histgramGroupConfig = new GroupConfig(
Collections.emptyMap(),
Collections.singletonMap("every_50", new HistogramGroupSource("count", 50.0))
Collections.singletonMap("every_50", new HistogramGroupSource("count", null, 50.0))
);
AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
aggs.addAggregator(AggregationBuilders.avg("avg_rating").field("stars"));
@ -169,7 +169,7 @@ public class TransformProgressIT extends ESRestTestCase {
histgramGroupConfig = new GroupConfig(
Collections.emptyMap(),
Collections.singletonMap("every_50", new HistogramGroupSource("missing_field", 50.0))
Collections.singletonMap("every_50", new HistogramGroupSource("missing_field", null, 50.0))
);
pivotConfig = new PivotConfig(histgramGroupConfig, aggregationConfig, null);
config = new TransformConfig("get_progress_transform", sourceConfig, destConfig, null, null, null, pivotConfig, null);

View File

@ -42,41 +42,40 @@ public final class TransformProgressGatherer {
public void getInitialProgress(QueryBuilder filterQuery, TransformConfig config, ActionListener<TransformProgress> progressListener) {
SearchRequest request = getSearchRequest(config, filterQuery);
ActionListener<SearchResponse> searchResponseActionListener = ActionListener
.wrap(
searchResponse -> progressListener.onResponse(searchResponseToTransformProgressFunction().apply(searchResponse)),
progressListener::onFailure
);
ClientHelper
.executeWithHeadersAsync(
config.getHeaders(),
ClientHelper.TRANSFORM_ORIGIN,
client,
SearchAction.INSTANCE,
request,
searchResponseActionListener
);
ActionListener<SearchResponse> searchResponseActionListener = ActionListener.wrap(
searchResponse -> progressListener.onResponse(searchResponseToTransformProgressFunction().apply(searchResponse)),
progressListener::onFailure
);
ClientHelper.executeWithHeadersAsync(
config.getHeaders(),
ClientHelper.TRANSFORM_ORIGIN,
client,
SearchAction.INSTANCE,
request,
searchResponseActionListener
);
}
public static SearchRequest getSearchRequest(TransformConfig config, QueryBuilder filteredQuery) {
SearchRequest request = new SearchRequest(config.getSource().getIndex());
request.allowPartialSearchResults(false);
BoolQueryBuilder existsClauses = QueryBuilders.boolQuery();
config
.getPivotConfig()
config.getPivotConfig()
.getGroupConfig()
.getGroups()
.values()
// TODO change once we allow missing_buckets
.forEach(src -> existsClauses.must(QueryBuilders.existsQuery(src.getField())));
.forEach(src -> {
if (src.getField() != null) {
existsClauses.must(QueryBuilders.existsQuery(src.getField()));
}
});
request
.source(
new SearchSourceBuilder()
.size(0)
.trackTotalHits(true)
.query(QueryBuilders.boolQuery().filter(filteredQuery).filter(existsClauses))
);
request.source(
new SearchSourceBuilder().size(0)
.trackTotalHits(true)
.query(QueryBuilders.boolQuery().filter(filteredQuery).filter(existsClauses))
);
return request;
}

View File

@ -28,8 +28,8 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregati
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.core.transform.TransformMessages;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import org.elasticsearch.xpack.core.transform.transforms.SourceConfig;
import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats;
import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig;
import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig;
import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource;
@ -63,7 +63,7 @@ public class Pivot {
this.cachedCompositeAggregation = createCompositeAggregation(config);
boolean supportsIncrementalBucketUpdate = false;
for(Entry<String, SingleGroupSource> entry: config.getGroupConfig().getGroups().entrySet()) {
for (Entry<String, SingleGroupSource> entry : config.getGroupConfig().getGroups().entrySet()) {
supportsIncrementalBucketUpdate |= entry.getValue().supportsIncrementalBucketUpdate();
}
@ -83,13 +83,18 @@ public class Pivot {
client.execute(SearchAction.INSTANCE, searchRequest, ActionListener.wrap(response -> {
if (response == null) {
listener.onFailure(new ElasticsearchStatusException("Unexpected null response from test query",
RestStatus.SERVICE_UNAVAILABLE));
listener.onFailure(
new ElasticsearchStatusException("Unexpected null response from test query", RestStatus.SERVICE_UNAVAILABLE)
);
return;
}
if (response.status() != RestStatus.OK) {
listener.onFailure(new ElasticsearchStatusException("Unexpected status from response of test query: " + response.status(),
response.status()));
listener.onFailure(
new ElasticsearchStatusException(
"Unexpected status from response of test query: " + response.status(),
response.status()
)
);
return;
}
listener.onResponse(true);
@ -128,6 +133,8 @@ public class Pivot {
sourceBuilder.query(queryBuilder);
searchRequest.source(sourceBuilder);
searchRequest.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);
logger.trace("Search request: {}", searchRequest);
return searchRequest;
}
@ -149,7 +156,7 @@ public class Pivot {
public Map<String, Set<String>> initialIncrementalBucketUpdateMap() {
Map<String, Set<String>> changedBuckets = new HashMap<>();
for(Entry<String, SingleGroupSource> entry: config.getGroupConfig().getGroups().entrySet()) {
for (Entry<String, SingleGroupSource> entry : config.getGroupConfig().getGroups().entrySet()) {
if (entry.getValue().supportsIncrementalBucketUpdate()) {
changedBuckets.put(entry.getKey(), new HashSet<>());
}
@ -162,20 +169,24 @@ public class Pivot {
return supportsIncrementalBucketUpdate;
}
public Stream<Map<String, Object>> extractResults(CompositeAggregation agg,
Map<String, String> fieldTypeMap,
TransformIndexerStats transformIndexerStats) {
public Stream<Map<String, Object>> extractResults(
CompositeAggregation agg,
Map<String, String> fieldTypeMap,
TransformIndexerStats transformIndexerStats
) {
GroupConfig groups = config.getGroupConfig();
Collection<AggregationBuilder> aggregationBuilders = config.getAggregationConfig().getAggregatorFactories();
Collection<PipelineAggregationBuilder> pipelineAggregationBuilders = config.getAggregationConfig().getPipelineAggregatorFactories();
return AggregationResultUtils.extractCompositeAggregationResults(agg,
return AggregationResultUtils.extractCompositeAggregationResults(
agg,
groups,
aggregationBuilders,
pipelineAggregationBuilders,
fieldTypeMap,
transformIndexerStats);
transformIndexerStats
);
}
public QueryBuilder filterBuckets(Map<String, Set<String>> changedBuckets) {
@ -235,8 +246,10 @@ public class Pivot {
try (XContentBuilder builder = jsonBuilder()) {
config.toCompositeAggXContent(builder, forChangeDetection);
XContentParser parser = builder.generator().contentType().xContent().createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput());
XContentParser parser = builder.generator()
.contentType()
.xContent()
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput());
compositeAggregation = CompositeAggregationBuilder.PARSER.parse(parser, COMPOSITE_AGGREGATION_NAME);
} catch (IOException e) {
throw new RuntimeException(TransformMessages.TRANSFORM_PIVOT_FAILED_TO_CREATE_COMPOSITE_AGGREGATION, e);