parent 00997b4f60
commit 586453fef1
@@ -56,7 +56,6 @@ public class DatafeedConfig implements ToXContentObject {
     public static final ParseField FREQUENCY = new ParseField("frequency");
     public static final ParseField INDEXES = new ParseField("indexes");
     public static final ParseField INDICES = new ParseField("indices");
-    public static final ParseField TYPES = new ParseField("types");
     public static final ParseField QUERY = new ParseField("query");
     public static final ParseField SCROLL_SIZE = new ParseField("scroll_size");
     public static final ParseField AGGREGATIONS = new ParseField("aggregations");
@@ -73,7 +72,6 @@ public class DatafeedConfig implements ToXContentObject {
 
         PARSER.declareStringArray(Builder::setIndices, INDEXES);
         PARSER.declareStringArray(Builder::setIndices, INDICES);
-        PARSER.declareStringArray(Builder::setTypes, TYPES);
         PARSER.declareString((builder, val) ->
             builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
         PARSER.declareString((builder, val) ->
@@ -103,7 +101,6 @@ public class DatafeedConfig implements ToXContentObject {
     private final TimeValue queryDelay;
     private final TimeValue frequency;
     private final List<String> indices;
-    private final List<String> types;
     private final BytesReference query;
     private final BytesReference aggregations;
     private final List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -112,15 +109,14 @@ public class DatafeedConfig implements ToXContentObject {
     private final DelayedDataCheckConfig delayedDataCheckConfig;
 
 
-    private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
-                           BytesReference query, BytesReference aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
-                           Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
+    private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, BytesReference query,
+                           BytesReference aggregations, List<SearchSourceBuilder.ScriptField> scriptFields, Integer scrollSize,
+                           ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
         this.id = id;
         this.jobId = jobId;
         this.queryDelay = queryDelay;
         this.frequency = frequency;
         this.indices = indices == null ? null : Collections.unmodifiableList(indices);
-        this.types = types == null ? null : Collections.unmodifiableList(types);
         this.query = query;
         this.aggregations = aggregations;
         this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields);
@@ -149,10 +145,6 @@ public class DatafeedConfig implements ToXContentObject {
         return indices;
     }
 
-    public List<String> getTypes() {
-        return types;
-    }
-
     public Integer getScrollSize() {
         return scrollSize;
     }
@@ -191,9 +183,6 @@ public class DatafeedConfig implements ToXContentObject {
         if (indices != null) {
             builder.field(INDICES.getPreferredName(), indices);
         }
-        if (types != null) {
-            builder.field(TYPES.getPreferredName(), types);
-        }
         if (query != null) {
             builder.field(QUERY.getPreferredName(), asMap(query));
         }
@@ -251,7 +240,6 @@ public class DatafeedConfig implements ToXContentObject {
             && Objects.equals(this.frequency, that.frequency)
             && Objects.equals(this.queryDelay, that.queryDelay)
             && Objects.equals(this.indices, that.indices)
-            && Objects.equals(this.types, that.types)
             && Objects.equals(asMap(this.query), asMap(that.query))
             && Objects.equals(this.scrollSize, that.scrollSize)
             && Objects.equals(asMap(this.aggregations), asMap(that.aggregations))
@@ -267,7 +255,7 @@ public class DatafeedConfig implements ToXContentObject {
      */
     @Override
     public int hashCode() {
-        return Objects.hash(id, jobId, frequency, queryDelay, indices, types, asMap(query), scrollSize, asMap(aggregations), scriptFields,
+        return Objects.hash(id, jobId, frequency, queryDelay, indices, asMap(query), scrollSize, asMap(aggregations), scriptFields,
             chunkingConfig, delayedDataCheckConfig);
     }
 
@@ -282,7 +270,6 @@ public class DatafeedConfig implements ToXContentObject {
         private TimeValue queryDelay;
         private TimeValue frequency;
         private List<String> indices;
-        private List<String> types;
         private BytesReference query;
         private BytesReference aggregations;
         private List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -301,7 +288,6 @@ public class DatafeedConfig implements ToXContentObject {
            this.queryDelay = config.queryDelay;
            this.frequency = config.frequency;
            this.indices = config.indices == null ? null : new ArrayList<>(config.indices);
-           this.types = config.types == null ? null : new ArrayList<>(config.types);
            this.query = config.query;
            this.aggregations = config.aggregations;
            this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields);
@@ -319,11 +305,6 @@ public class DatafeedConfig implements ToXContentObject {
            return setIndices(Arrays.asList(indices));
        }
 
-       public Builder setTypes(List<String> types) {
-           this.types = types;
-           return this;
-       }
-
        public Builder setQueryDelay(TimeValue queryDelay) {
            this.queryDelay = queryDelay;
            return this;
@@ -396,7 +377,7 @@ public class DatafeedConfig implements ToXContentObject {
        }
 
        public DatafeedConfig build() {
-           return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
+           return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize,
                chunkingConfig, delayedDataCheckConfig);
        }

@@ -60,7 +60,6 @@ public class DatafeedUpdate implements ToXContentObject {
         PARSER.declareString(Builder::setJobId, Job.ID);
         PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES);
         PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES);
-        PARSER.declareStringArray(Builder::setTypes, DatafeedConfig.TYPES);
         PARSER.declareString((builder, val) -> builder.setQueryDelay(
             TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY);
         PARSER.declareString((builder, val) -> builder.setFrequency(
@@ -93,7 +92,6 @@ public class DatafeedUpdate implements ToXContentObject {
     private final TimeValue queryDelay;
     private final TimeValue frequency;
     private final List<String> indices;
-    private final List<String> types;
     private final BytesReference query;
     private final BytesReference aggregations;
     private final List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -101,15 +99,14 @@ public class DatafeedUpdate implements ToXContentObject {
     private final ChunkingConfig chunkingConfig;
     private final DelayedDataCheckConfig delayedDataCheckConfig;
 
-    private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
-                           BytesReference query, BytesReference aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
-                           Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
+    private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, BytesReference query,
+                           BytesReference aggregations, List<SearchSourceBuilder.ScriptField> scriptFields, Integer scrollSize,
+                           ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
         this.id = id;
         this.jobId = jobId;
         this.queryDelay = queryDelay;
         this.frequency = frequency;
         this.indices = indices;
-        this.types = types;
         this.query = query;
         this.aggregations = aggregations;
         this.scriptFields = scriptFields;
@@ -143,7 +140,6 @@ public class DatafeedUpdate implements ToXContentObject {
         if (aggregations != null) {
             builder.field(DatafeedConfig.AGGREGATIONS.getPreferredName(), asMap(aggregations));
         }
-        addOptionalField(builder, DatafeedConfig.TYPES, types);
         if (scriptFields != null) {
             builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName());
             for (SearchSourceBuilder.ScriptField scriptField : scriptFields) {
@@ -182,10 +178,6 @@ public class DatafeedUpdate implements ToXContentObject {
         return indices;
     }
 
-    public List<String> getTypes() {
-        return types;
-    }
-
     public Integer getScrollSize() {
         return scrollSize;
     }
@@ -240,7 +232,6 @@ public class DatafeedUpdate implements ToXContentObject {
             && Objects.equals(this.frequency, that.frequency)
             && Objects.equals(this.queryDelay, that.queryDelay)
             && Objects.equals(this.indices, that.indices)
-            && Objects.equals(this.types, that.types)
             && Objects.equals(asMap(this.query), asMap(that.query))
             && Objects.equals(this.scrollSize, that.scrollSize)
             && Objects.equals(asMap(this.aggregations), asMap(that.aggregations))
@@ -256,7 +247,7 @@ public class DatafeedUpdate implements ToXContentObject {
      */
     @Override
     public int hashCode() {
-        return Objects.hash(id, jobId, frequency, queryDelay, indices, types, asMap(query), scrollSize, asMap(aggregations), scriptFields,
+        return Objects.hash(id, jobId, frequency, queryDelay, indices, asMap(query), scrollSize, asMap(aggregations), scriptFields,
             chunkingConfig, delayedDataCheckConfig);
     }
 
@@ -271,7 +262,6 @@ public class DatafeedUpdate implements ToXContentObject {
         private TimeValue queryDelay;
         private TimeValue frequency;
         private List<String> indices;
-        private List<String> types;
         private BytesReference query;
         private BytesReference aggregations;
         private List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -289,7 +279,6 @@ public class DatafeedUpdate implements ToXContentObject {
            this.queryDelay = config.queryDelay;
            this.frequency = config.frequency;
            this.indices = config.indices;
-           this.types = config.types;
            this.query = config.query;
            this.aggregations = config.aggregations;
            this.scriptFields = config.scriptFields;
@@ -312,11 +301,6 @@ public class DatafeedUpdate implements ToXContentObject {
            return setIndices(Arrays.asList(indices));
        }
 
-       public Builder setTypes(List<String> types) {
-           this.types = types;
-           return this;
-       }
-
        public Builder setQueryDelay(TimeValue queryDelay) {
            this.queryDelay = queryDelay;
            return this;
@@ -380,7 +364,7 @@ public class DatafeedUpdate implements ToXContentObject {
        }
 
        public DatafeedUpdate build() {
-           return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
+           return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize,
                chunkingConfig, delayedDataCheckConfig);
        }

@@ -558,7 +558,6 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId)
             .setIndices(indexName)
             .setQueryDelay(TimeValue.timeValueSeconds(1))
-            .setTypes(Arrays.asList("_doc"))
             .setFrequency(TimeValue.timeValueSeconds(1)).build();
         machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
 
@@ -768,7 +767,6 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId)
             .setIndices(indexName)
             .setQueryDelay(TimeValue.timeValueSeconds(1))
-            .setTypes(Collections.singletonList("_doc"))
             .setFrequency(TimeValue.timeValueSeconds(1)).build();
         machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
 
@@ -1403,7 +1401,6 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
         DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId)
             .setIndices(indexName)
             .setQueryDelay(TimeValue.timeValueSeconds(1))
-            .setTypes(Arrays.asList("_doc"))
             .setFrequency(TimeValue.timeValueSeconds(1)).build();
         highLevelClient().machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
         return datafeedId;
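
Note: with `types` gone from the client config, the builder chain above is all that remains. A minimal sketch of the post-change usage, mirroring the tests in these hunks (the feed, job, and index names are illustrative, and an already-constructed MachineLearningClient is assumed):

    // Build and register a datafeed over plain indices; there is no setTypes() anymore.
    DatafeedConfig datafeed = DatafeedConfig.builder("my-feed", "my-job")
        .setIndices("my-index")
        .setQueryDelay(TimeValue.timeValueSeconds(1))
        .setFrequency(TimeValue.timeValueSeconds(1))
        .build();
    machineLearningClient.putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);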

@@ -872,7 +872,6 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         createIndexRequest.mapping("_doc", "timestamp", "type=date", "total", "type=long");
         highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
         DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
-            .setTypes(Arrays.asList("_doc"))
             .setIndices(indexName)
             .build();
         client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
@@ -932,7 +931,6 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         createIndexRequest.mapping("_doc", "timestamp", "type=date", "total", "type=long");
         highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
         DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
-            .setTypes(Arrays.asList("_doc"))
             .setIndices(indexName)
             .build();
         client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
@@ -1053,14 +1051,12 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         createIndexRequest.mapping("_doc", "timestamp", "type=date", "total", "type=long");
         highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
         DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId())
-            .setTypes(Arrays.asList("_doc"))
             .setIndices(indexName)
             .build();
         client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
 
         String datafeedId2 = secondJob.getId() + "-feed";
         DatafeedConfig secondDatafeed = DatafeedConfig.builder(datafeedId2, secondJob.getId())
-            .setTypes(Arrays.asList("_doc"))
             .setIndices(indexName)
             .build();
         client.machineLearning().putDatafeed(new PutDatafeedRequest(secondDatafeed), RequestOptions.DEFAULT);

@@ -51,7 +51,6 @@ public class DatafeedConfigTests extends AbstractXContentTestCase<DatafeedConfig
         long bucketSpanMillis = 3600000;
         DatafeedConfig.Builder builder = constructBuilder();
         builder.setIndices(randomStringList(1, 10));
-        builder.setTypes(randomStringList(0, 10));
         if (randomBoolean()) {
             try {
                 builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10)));

@@ -46,9 +46,6 @@ public class DatafeedUpdateTests extends AbstractXContentTestCase<DatafeedUpdate
         if (randomBoolean()) {
             builder.setIndices(DatafeedConfigTests.randomStringList(1, 10));
         }
-        if (randomBoolean()) {
-            builder.setTypes(DatafeedConfigTests.randomStringList(1, 10));
-        }
         if (randomBoolean()) {
             try {
                 builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10)));

@@ -59,7 +59,6 @@ PUT _ml/datafeeds/datafeed-farequote
 {
   "job_id":"farequote",
   "indices": ["farequote"],
-  "types": ["response"],
   "aggregations": {
     "buckets": {
       "date_histogram": {

@@ -59,11 +59,6 @@ A {dfeed} resource has the following properties:
   (unsigned integer) The `size` parameter that is used in {es} searches.
   The default value is `1000`.
 
-`types`::
-  (array) A list of types to search for within the specified indices. For
-  example: `[]`. This property is provided for backwards compatibility with
-  releases earlier than 6.0.0. For more information, see <<removal-of-types>>.
-
 `delayed_data_check_config`::
   (object) Specifies whether the data feed checks for missing data and
   the size of the window. For example:

@@ -76,7 +76,6 @@ The API returns the following results:
       "indices": [
         "server-metrics"
       ],
-      "types": [],
       "query": {
         "match_all": {
           "boost": 1.0

@@ -73,11 +73,6 @@ You must create a job before you create a {dfeed}. You can associate only one
   (unsigned integer) The `size` parameter that is used in {es} searches.
   The default value is `1000`.
 
-`types`::
-  (array) A list of types to search for within the specified indices.
-  For example: `[]`. This property is provided for backwards compatibility with
-  releases earlier than 6.0.0. For more information, see <<removal-of-types>>.
-
 `delayed_data_check_config`::
   (object) Specifies whether the data feed checks for missing data and the size
   of the window it checks. See <<ml-datafeed-delayed-data-check-config>>.
@@ -125,7 +120,6 @@ When the {dfeed} is created, you receive the following results:
   "indices": [
     "server-metrics"
   ],
-  "types": [],
   "query": {
     "match_all": {
       "boost": 1.0

@@ -68,11 +68,6 @@ The following properties can be updated after the {dfeed} is created:
   (unsigned integer) The `size` parameter that is used in {es} searches.
   The default value is `1000`.
 
-`types`::
-  (array) A list of types to search for within the specified indices.
-  For example: `[]`. This property is provided for backwards compatibility with
-  releases earlier than 6.0.0. For more information, see <<removal-of-types>>.
-
 For more information about these properties,
 see <<ml-datafeed-resource>>.
 
@@ -120,7 +115,6 @@ with the updated values:
   "job_id": "total-requests",
   "query_delay": "83474ms",
   "indices": ["server-metrics"],
-  "types": [],
   "query": {
     "term": {
       "level": {

@@ -127,7 +127,6 @@ PUT _ml/datafeeds/datafeed-test1
 {
   "job_id": "test1",
   "indices": ["my_index"],
-  "types": ["_doc"],
   "query": {
     "match_all": {
       "boost": 1
@@ -233,7 +232,6 @@ PUT _ml/datafeeds/datafeed-test2
 {
   "job_id": "test2",
   "indices": ["my_index"],
-  "types": ["_doc"],
   "query": {
     "match_all": {
       "boost": 1
@@ -482,7 +480,6 @@ PUT _ml/datafeeds/datafeed-test3
 {
   "job_id": "test3",
   "indices": ["my_index"],
-  "types": ["_doc"],
   "query": {
     "match_all": {
       "boost": 1
@@ -551,7 +548,6 @@ PUT _ml/datafeeds/datafeed-test4
 {
   "job_id": "test4",
   "indices": ["my_index"],
-  "types": ["_doc"],
   "query": {
     "match_all": {
       "boost": 1
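
The documentation hunks above all make the same mechanical change: request bodies and sample responses drop the `types` array. A hedged sketch of the equivalent call through the low-level REST client after this change (endpoint and names taken from the snippets above; an existing RestClient instance `client` is assumed):

    // PUT a datafeed whose body no longer carries "types".
    Request request = new Request("PUT", "_ml/datafeeds/datafeed-test1");
    request.setJsonEntity("{\"job_id\": \"test1\", \"indices\": [\"my_index\"]}");
    Response response = client.performRequest(request);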

@@ -124,7 +124,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
     public static final ParseField FREQUENCY = new ParseField("frequency");
     public static final ParseField INDEXES = new ParseField("indexes");
     public static final ParseField INDICES = new ParseField("indices");
-    public static final ParseField TYPES = new ParseField("types");
     public static final ParseField QUERY = new ParseField("query");
     public static final ParseField SCROLL_SIZE = new ParseField("scroll_size");
     public static final ParseField AGGREGATIONS = new ParseField("aggregations");
@@ -161,7 +160,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         parser.declareString(Builder::setJobId, Job.ID);
         parser.declareStringArray(Builder::setIndices, INDEXES);
         parser.declareStringArray(Builder::setIndices, INDICES);
-        parser.declareStringArray(Builder::setTypes, TYPES);
         parser.declareString((builder, val) ->
             builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
         parser.declareString((builder, val) ->
@@ -212,7 +210,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
     private final TimeValue frequency;
 
     private final List<String> indices;
-    private final List<String> types;
     private final Map<String, Object> query;
     private final Map<String, Object> aggregations;
     private final List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -223,7 +220,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
     private final CachedSupplier<QueryBuilder> querySupplier;
     private final CachedSupplier<AggregatorFactories.Builder> aggSupplier;
 
-    private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
+    private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices,
                            Map<String, Object> query, Map<String, Object> aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
                            Integer scrollSize, ChunkingConfig chunkingConfig, Map<String, String> headers,
                            DelayedDataCheckConfig delayedDataCheckConfig) {
@@ -232,7 +229,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         this.queryDelay = queryDelay;
         this.frequency = frequency;
         this.indices = indices == null ? null : Collections.unmodifiableList(indices);
-        this.types = types == null ? null : Collections.unmodifiableList(types);
         this.query = query == null ? null : Collections.unmodifiableMap(query);
         this.aggregations = aggregations == null ? null : Collections.unmodifiableMap(aggregations);
         this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields);
@@ -254,10 +250,11 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         } else {
             this.indices = null;
         }
-        if (in.readBoolean()) {
-            this.types = Collections.unmodifiableList(in.readList(StreamInput::readString));
-        } else {
-            this.types = null;
+        // This consumes the list of types if there was one.
+        if (in.getVersion().before(Version.V_7_0_0)) {
+            if (in.readBoolean()) {
+                in.readList(StreamInput::readString);
+            }
         }
         if (in.getVersion().before(Version.V_6_6_0)) {
             this.query = QUERY_TRANSFORMER.toMap(in.readNamedWriteable(QueryBuilder.class));
@@ -325,10 +322,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         return indices;
     }
 
-    public List<String> getTypes() {
-        return types;
-    }
-
     public Integer getScrollSize() {
         return scrollSize;
     }
@@ -419,11 +412,11 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         } else {
             out.writeBoolean(false);
         }
-        if (types != null) {
+        // Write the now removed types to prior versions.
+        // An empty list is expected
+        if (out.getVersion().before(Version.V_7_0_0)) {
             out.writeBoolean(true);
-            out.writeStringList(types);
-        } else {
-            out.writeBoolean(false);
+            out.writeStringList(Collections.emptyList());
         }
         if (out.getVersion().before(Version.V_6_6_0)) {
             out.writeNamedWriteable(getParsedQuery());
@@ -464,7 +457,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
             builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep());
         }
         builder.field(INDICES.getPreferredName(), indices);
-        builder.field(TYPES.getPreferredName(), types);
         builder.field(QUERY.getPreferredName(), query);
         if (aggregations != null) {
             builder.field(AGGREGATIONS.getPreferredName(), aggregations);
@@ -512,7 +504,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
             && Objects.equals(this.frequency, that.frequency)
             && Objects.equals(this.queryDelay, that.queryDelay)
             && Objects.equals(this.indices, that.indices)
-            && Objects.equals(this.types, that.types)
             && Objects.equals(this.query, that.query)
             && Objects.equals(this.scrollSize, that.scrollSize)
             && Objects.equals(this.aggregations, that.aggregations)
@@ -524,8 +515,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
 
     @Override
     public int hashCode() {
-        return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields,
-            chunkingConfig, headers, delayedDataCheckConfig);
+        return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig,
+            headers, delayedDataCheckConfig);
     }
 
     @Override
@@ -591,7 +582,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
         private TimeValue queryDelay;
         private TimeValue frequency;
         private List<String> indices = Collections.emptyList();
-        private List<String> types = Collections.emptyList();
         private Map<String, Object> query;
         private Map<String, Object> aggregations;
         private List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -618,7 +608,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
            this.queryDelay = config.queryDelay;
            this.frequency = config.frequency;
            this.indices = new ArrayList<>(config.indices);
-           this.types = new ArrayList<>(config.types);
            this.query = config.query == null ? null : new LinkedHashMap<>(config.query);
            this.aggregations = config.aggregations == null ? null : new LinkedHashMap<>(config.aggregations);
            this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields);
@@ -648,10 +637,6 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
            this.indices = ExceptionsHelper.requireNonNull(indices, INDICES.getPreferredName());
        }
 
-       public void setTypes(List<String> types) {
-           this.types = ExceptionsHelper.requireNonNull(types, TYPES.getPreferredName());
-       }
-
        public void setQueryDelay(TimeValue queryDelay) {
            TimeUtils.checkNonNegativeMultiple(queryDelay, TimeUnit.MILLISECONDS, QUERY_DELAY);
            this.queryDelay = queryDelay;
@@ -741,15 +726,12 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
            if (indices == null || indices.isEmpty() || indices.contains(null) || indices.contains("")) {
                throw invalidOptionValue(INDICES.getPreferredName(), indices);
            }
-           if (types == null || types.contains(null) || types.contains("")) {
-               throw invalidOptionValue(TYPES.getPreferredName(), types);
-           }
 
            validateScriptFields();
            setDefaultChunkingConfig();
 
            setDefaultQueryDelay();
-           return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
+           return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize,
                chunkingConfig, headers, delayedDataCheckConfig);
        }
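
The server-side class keeps the wire format compatible in both directions: reads from pre-7.0 nodes consume and discard the types list, while writes to pre-7.0 nodes emit an empty list so older readers see the field they expect. A minimal self-contained sketch of that version-gated pattern (the In/Out interfaces are hypothetical stand-ins, not the real StreamInput/StreamOutput API):

    import java.util.Collections;
    import java.util.List;

    class WireCompatSketch {
        interface In { int version(); boolean readBoolean(); List<String> readStringList(); }
        interface Out { int version(); void writeBoolean(boolean b); void writeStringList(List<String> l); }

        static final int V_7_0_0 = 7_00_00;

        // Reading: older peers still send an optional string list; consume it and drop it.
        static void readBwc(In in) {
            if (in.version() < V_7_0_0) {
                if (in.readBoolean()) {
                    in.readStringList(); // discard the removed "types" field
                }
            }
        }

        // Writing: older peers still expect the field; send an empty list.
        static void writeBwc(Out out) {
            if (out.version() < V_7_0_0) {
                out.writeBoolean(true);
                out.writeStringList(Collections.emptyList());
            }
        }
    }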

@@ -48,7 +48,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
         PARSER.declareString(Builder::setJobId, Job.ID);
         PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES);
         PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES);
-        PARSER.declareStringArray(Builder::setTypes, DatafeedConfig.TYPES);
         PARSER.declareString((builder, val) -> builder.setQueryDelay(
             TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY);
         PARSER.declareString((builder, val) -> builder.setFrequency(
@@ -79,7 +78,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
     private final TimeValue queryDelay;
     private final TimeValue frequency;
     private final List<String> indices;
-    private final List<String> types;
     private final QueryBuilder query;
     private final AggregatorFactories.Builder aggregations;
     private final List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -87,15 +85,14 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
     private final ChunkingConfig chunkingConfig;
     private final DelayedDataCheckConfig delayedDataCheckConfig;
 
-    private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
-                           QueryBuilder query, AggregatorFactories.Builder aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
-                           Integer scrollSize, ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
+    private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, QueryBuilder query,
+                           AggregatorFactories.Builder aggregations, List<SearchSourceBuilder.ScriptField> scriptFields, Integer scrollSize,
+                           ChunkingConfig chunkingConfig, DelayedDataCheckConfig delayedDataCheckConfig) {
         this.id = id;
         this.jobId = jobId;
         this.queryDelay = queryDelay;
         this.frequency = frequency;
         this.indices = indices;
-        this.types = types;
         this.query = query;
         this.aggregations = aggregations;
         this.scriptFields = scriptFields;
@@ -114,10 +111,11 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
         } else {
             this.indices = null;
         }
-        if (in.readBoolean()) {
-            this.types = in.readList(StreamInput::readString);
-        } else {
-            this.types = null;
+        // This consumes the list of types if there was one.
+        if (in.getVersion().before(Version.V_7_0_0)) {
+            if (in.readBoolean()) {
+                in.readList(StreamInput::readString);
+            }
         }
         this.query = in.readOptionalNamedWriteable(QueryBuilder.class);
         this.aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new);
@@ -154,11 +152,11 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
         } else {
             out.writeBoolean(false);
         }
-        if (types != null) {
+        // Write the now removed types to prior versions.
+        // An empty list is expected
+        if (out.getVersion().before(Version.V_7_0_0)) {
             out.writeBoolean(true);
-            out.writeStringList(types);
-        } else {
-            out.writeBoolean(false);
+            out.writeStringList(Collections.emptyList());
         }
         out.writeOptionalNamedWriteable(query);
         out.writeOptionalWriteable(aggregations);
@@ -187,7 +185,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
             builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
         }
         addOptionalField(builder, DatafeedConfig.INDICES, indices);
-        addOptionalField(builder, DatafeedConfig.TYPES, types);
         addOptionalField(builder, DatafeedConfig.QUERY, query);
         addOptionalField(builder, DatafeedConfig.AGGREGATIONS, aggregations);
         if (scriptFields != null) {
@@ -226,10 +223,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
         return indices;
     }
 
-    List<String> getTypes() {
-        return types;
-    }
-
     Integer getScrollSize() {
         return scrollSize;
     }
@@ -291,9 +284,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
         if (indices != null) {
             builder.setIndices(indices);
         }
-        if (types != null) {
-            builder.setTypes(types);
-        }
         if (query != null) {
             builder.setParsedQuery(query);
         }
@@ -347,7 +337,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
             && Objects.equals(this.frequency, that.frequency)
             && Objects.equals(this.queryDelay, that.queryDelay)
             && Objects.equals(this.indices, that.indices)
-            && Objects.equals(this.types, that.types)
             && Objects.equals(this.query, that.query)
             && Objects.equals(this.scrollSize, that.scrollSize)
             && Objects.equals(this.aggregations, that.aggregations)
@@ -358,8 +347,8 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
 
     @Override
     public int hashCode() {
-        return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields,
-            chunkingConfig, delayedDataCheckConfig);
+        return Objects.hash(id, jobId, frequency, queryDelay, indices, query, scrollSize, aggregations, scriptFields, chunkingConfig,
+            delayedDataCheckConfig);
     }
 
     @Override
@@ -371,7 +360,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
         return (frequency == null || Objects.equals(frequency, datafeed.getFrequency()))
             && (queryDelay == null || Objects.equals(queryDelay, datafeed.getQueryDelay()))
             && (indices == null || Objects.equals(indices, datafeed.getIndices()))
-            && (types == null || Objects.equals(types, datafeed.getTypes()))
             && (query == null || Objects.equals(query, datafeed.getParsedQuery()))
             && (scrollSize == null || Objects.equals(scrollSize, datafeed.getQueryDelay()))
             && (aggregations == null || Objects.equals(aggregations, datafeed.getParsedAggregations()))
@@ -387,7 +375,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
         private TimeValue queryDelay;
         private TimeValue frequency;
         private List<String> indices;
-        private List<String> types;
         private QueryBuilder query;
         private AggregatorFactories.Builder aggregations;
         private List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -408,7 +395,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
            this.queryDelay = config.queryDelay;
            this.frequency = config.frequency;
            this.indices = config.indices;
-           this.types = config.types;
            this.query = config.query;
            this.aggregations = config.aggregations;
            this.scriptFields = config.scriptFields;
@@ -429,10 +415,6 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
            this.indices = indices;
        }
 
-       public void setTypes(List<String> types) {
-           this.types = types;
-       }
-
        public void setQueryDelay(TimeValue queryDelay) {
            this.queryDelay = queryDelay;
        }
@@ -468,7 +450,7 @@ public class DatafeedUpdate implements Writeable, ToXContentObject {
        }
 
        public DatafeedUpdate build() {
-           return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
+           return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, query, aggregations, scriptFields, scrollSize,
                chunkingConfig, delayedDataCheckConfig);
        }
    }
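
DatafeedUpdate keeps every field nullable and only overwrites what was explicitly set, so removing `types` just removes one field from apply() and isNoop(). A rough sketch of that partial-update idiom, using hypothetical holder types rather than the real classes:

    import java.util.List;

    // Hypothetical stand-ins; the real classes are DatafeedConfig and DatafeedUpdate.
    class UpdateSketch {
        record Config(List<String> indices, Integer scrollSize) {}

        record Update(List<String> indices, Integer scrollSize) {
            // Only non-null update fields overwrite the existing config.
            Config apply(Config c) {
                return new Config(indices != null ? indices : c.indices(),
                                  scrollSize != null ? scrollSize : c.scrollSize());
            }
        }
    }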

@@ -325,9 +325,6 @@ public class ElasticsearchMappings {
                 .startObject(DatafeedConfig.INDICES.getPreferredName())
                     .field(TYPE, KEYWORD)
                 .endObject()
-                .startObject(DatafeedConfig.TYPES.getPreferredName())
-                    .field(TYPE, KEYWORD)
-                .endObject()
                 .startObject(DatafeedConfig.QUERY.getPreferredName())
                     .field(ENABLED, false)
                 .endObject()

@@ -243,7 +243,6 @@ public final class ReservedFieldNames {
         DatafeedConfig.QUERY_DELAY.getPreferredName(),
         DatafeedConfig.FREQUENCY.getPreferredName(),
         DatafeedConfig.INDICES.getPreferredName(),
-        DatafeedConfig.TYPES.getPreferredName(),
         DatafeedConfig.QUERY.getPreferredName(),
         DatafeedConfig.SCROLL_SIZE.getPreferredName(),
         DatafeedConfig.AGGREGATIONS.getPreferredName(),

@@ -31,7 +31,6 @@ public class PutDatafeedActionRequestTests extends AbstractStreamableXContentTes
     protected Request createTestInstance() {
         DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, randomAlphaOfLength(10));
         datafeedConfig.setIndices(Collections.singletonList(randomAlphaOfLength(10)));
-        datafeedConfig.setTypes(Collections.singletonList(randomAlphaOfLength(10)));
         return new Request(datafeedConfig.build());
     }
 

@@ -23,7 +23,6 @@ public class PutDatafeedActionResponseTests extends AbstractStreamableTestCase<R
         DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(
             DatafeedConfigTests.randomValidDatafeedId(), randomAlphaOfLength(10));
         datafeedConfig.setIndices(Arrays.asList(randomAlphaOfLength(10)));
-        datafeedConfig.setTypes(Arrays.asList(randomAlphaOfLength(10)));
         return new Response(datafeedConfig.build());
     }
 

@@ -88,7 +88,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     private static DatafeedConfig.Builder createRandomizedDatafeedConfigBuilder(String jobId, long bucketSpanMillis) {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder(randomValidDatafeedId(), jobId);
         builder.setIndices(randomStringList(1, 10));
-        builder.setTypes(randomStringList(0, 10));
         if (randomBoolean()) {
             builder.setParsedQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10)));
         }
@@ -396,7 +395,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     public void testBuild_GivenScriptFieldsAndAggregations() {
         DatafeedConfig.Builder datafeed = new DatafeedConfig.Builder("datafeed1", "job1");
         datafeed.setIndices(Collections.singletonList("my_index"));
-        datafeed.setTypes(Collections.singletonList("my_type"));
         datafeed.setScriptFields(Collections.singletonList(new SearchSourceBuilder.ScriptField(randomAlphaOfLength(10),
             mockScript(randomAlphaOfLength(10)), randomBoolean())));
         datafeed.setParsedAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo")));
@@ -409,7 +407,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     public void testHasAggregations_GivenNull() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
         builder.setIndices(Collections.singletonList("myIndex"));
-        builder.setTypes(Collections.singletonList("myType"));
         DatafeedConfig datafeedConfig = builder.build();
 
         assertThat(datafeedConfig.hasAggregations(), is(false));
@@ -418,7 +415,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     public void testHasAggregations_NonEmpty() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
         builder.setIndices(Collections.singletonList("myIndex"));
-        builder.setTypes(Collections.singletonList("myType"));
         MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
         builder.setParsedAggregations(new AggregatorFactories.Builder().addAggregator(
             AggregationBuilders.dateHistogram("time").interval(300000).subAggregation(maxTime).field("time")));
@@ -430,7 +426,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     public void testBuild_GivenEmptyAggregations() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
         builder.setIndices(Collections.singletonList("myIndex"));
-        builder.setTypes(Collections.singletonList("myType"));
         builder.setParsedAggregations(new AggregatorFactories.Builder());
 
         ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::build);
@@ -441,7 +436,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     public void testBuild_GivenHistogramWithDefaultInterval() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
         builder.setIndices(Collections.singletonList("myIndex"));
-        builder.setTypes(Collections.singletonList("myType"));
         MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
         builder.setParsedAggregations(new AggregatorFactories.Builder().addAggregator(
             AggregationBuilders.histogram("time").subAggregation(maxTime).field("time"))
@@ -770,7 +764,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     private static DatafeedConfig.Builder createDatafeedBuilderWithDateHistogram(DateHistogramAggregationBuilder dateHistogram) {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
         builder.setIndices(Collections.singletonList("myIndex"));
-        builder.setTypes(Collections.singletonList("myType"));
         AggregatorFactories.Builder aggs = new AggregatorFactories.Builder().addAggregator(dateHistogram);
         DatafeedConfig.validateAggregations(aggs);
         builder.setParsedAggregations(aggs);
@@ -784,7 +777,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
     @Override
     protected DatafeedConfig mutateInstance(DatafeedConfig instance) throws IOException {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder(instance);
-        switch (between(0, 10)) {
+        switch (between(0, 9)) {
         case 0:
             builder.setId(instance.getId() + randomValidDatafeedId());
             break;
@@ -807,11 +800,6 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
             builder.setIndices(indices);
             break;
         case 5:
-            List<String> types = new ArrayList<>(instance.getTypes());
-            types.add(randomAlphaOfLengthBetween(1, 20));
-            builder.setTypes(types);
-            break;
-        case 6:
             BoolQueryBuilder query = new BoolQueryBuilder();
             if (instance.getParsedQuery() != null) {
                 query.must(instance.getParsedQuery());
@@ -819,7 +807,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
             query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
             builder.setParsedQuery(query);
             break;
-        case 7:
+        case 6:
             if (instance.hasAggregations()) {
                 builder.setAggregations(null);
             } else {
@@ -834,16 +822,16 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
                 }
             }
             break;
-        case 8:
+        case 7:
             ArrayList<ScriptField> scriptFields = new ArrayList<>(instance.getScriptFields());
             scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true));
             builder.setScriptFields(scriptFields);
             builder.setAggregations(null);
             break;
-        case 9:
+        case 8:
             builder.setScrollSize(instance.getScrollSize() + between(1, 100));
             break;
-        case 10:
+        case 9:
             if (instance.getChunkingConfig() == null || instance.getChunkingConfig().getMode() == Mode.AUTO) {
                 ChunkingConfig newChunkingConfig = ChunkingConfig.newManual(new TimeValue(randomNonNegativeLong()));
                 builder.setChunkingConfig(newChunkingConfig);
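
Both test classes shrink their mutateInstance switch from between(0, 10) to between(0, 9): with the types case deleted, the remaining labels are renumbered so every branch still mutates exactly one field, which is what the equals/hashCode checks rely on. A generic sketch of the pattern, with a hypothetical two-field record standing in for the real config:

    import java.util.Random;

    // Sketch: produce a copy that differs from the input in exactly one field,
    // so checkEqualsAndHashCode-style tests can assert the result is never equal.
    class MutateSketch {
        record Instance(String id, int scrollSize) {}

        static Instance mutate(Instance in, Random random) {
            switch (random.nextInt(2)) {       // one case per remaining field
                case 0: return new Instance(in.id() + "-x", in.scrollSize());
                default: return new Instance(in.id(), in.scrollSize() + 1);
            }
        }
    }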

@@ -60,9 +60,6 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
         if (randomBoolean()) {
             builder.setIndices(DatafeedConfigTests.randomStringList(1, 10));
         }
-        if (randomBoolean()) {
-            builder.setTypes(DatafeedConfigTests.randomStringList(1, 10));
-        }
         if (randomBoolean()) {
             builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10)));
         }
@@ -145,13 +142,11 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
     public void testApply_givenFullUpdateNoAggregations() {
         DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed");
         datafeedBuilder.setIndices(Collections.singletonList("i_1"));
-        datafeedBuilder.setTypes(Collections.singletonList("t_1"));
         DatafeedConfig datafeed = datafeedBuilder.build();
 
         DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId());
         update.setJobId("bar");
         update.setIndices(Collections.singletonList("i_2"));
-        update.setTypes(Collections.singletonList("t_2"));
         update.setQueryDelay(TimeValue.timeValueSeconds(42));
         update.setFrequency(TimeValue.timeValueSeconds(142));
         update.setQuery(QueryBuilders.termQuery("a", "b"));
@@ -164,7 +159,6 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
 
         assertThat(updatedDatafeed.getJobId(), equalTo("bar"));
         assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2")));
-        assertThat(updatedDatafeed.getTypes(), equalTo(Collections.singletonList("t_2")));
         assertThat(updatedDatafeed.getQueryDelay(), equalTo(TimeValue.timeValueSeconds(42)));
         assertThat(updatedDatafeed.getFrequency(), equalTo(TimeValue.timeValueSeconds(142)));
         assertThat(updatedDatafeed.getParsedQuery(), equalTo(QueryBuilders.termQuery("a", "b")));
@@ -180,7 +174,6 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
     public void testApply_givenAggregations() {
         DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed");
         datafeedBuilder.setIndices(Collections.singletonList("i_1"));
-        datafeedBuilder.setTypes(Collections.singletonList("t_1"));
         DatafeedConfig datafeed = datafeedBuilder.build();
 
         DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId());
@@ -191,7 +184,6 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
         DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap());
 
         assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1")));
-        assertThat(updatedDatafeed.getTypes(), equalTo(Collections.singletonList("t_1")));
         assertThat(updatedDatafeed.getParsedAggregations(),
             equalTo(new AggregatorFactories.Builder().addAggregator(
                 AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime))));
@@ -219,7 +211,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
     @Override
     protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) {
         DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(instance);
-        switch (between(0, 10)) {
+        switch (between(0, 9)) {
         case 0:
             builder.setId(instance.getId() + DatafeedConfigTests.randomValidDatafeedId());
             break;
@@ -251,16 +243,6 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
             builder.setIndices(indices);
             break;
         case 5:
-            List<String> types;
-            if (instance.getTypes() == null) {
-                types = new ArrayList<>();
-            } else {
-                types = new ArrayList<>(instance.getTypes());
-            }
-            types.add(randomAlphaOfLengthBetween(1, 20));
-            builder.setTypes(types);
-            break;
-        case 6:
             BoolQueryBuilder query = new BoolQueryBuilder();
             if (instance.getQuery() != null) {
                 query.must(instance.getQuery());
@@ -268,7 +250,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
             query.filter(new TermQueryBuilder(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
             builder.setQuery(query);
             break;
-        case 7:
+        case 6:
             if (instance.hasAggregations()) {
                 builder.setAggregations(null);
             } else {
@@ -282,20 +264,20 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
                 }
             }
             break;
-        case 8:
+        case 7:
             ArrayList<ScriptField> scriptFields = new ArrayList<>(instance.getScriptFields());
             scriptFields.add(new ScriptField(randomAlphaOfLengthBetween(1, 10), new Script("foo"), true));
             builder.setScriptFields(scriptFields);
             builder.setAggregations(null);
             break;
-        case 9:
+        case 8:
             if (instance.getScrollSize() == null) {
                 builder.setScrollSize(between(1, 100));
             } else {
                 builder.setScrollSize(instance.getScrollSize() + between(1, 100));
             }
             break;
-        case 10:
+        case 9:
             if (instance.getChunkingConfig() == null || instance.getChunkingConfig().getMode() == Mode.AUTO) {
                 ChunkingConfig newChunkingConfig = ChunkingConfig.newManual(new TimeValue(randomNonNegativeLong()));
                 builder.setChunkingConfig(newChunkingConfig);

@@ -265,7 +265,6 @@ public class MlBasicMultiNodeIT extends ESRestTestCase {
         xContentBuilder.startObject();
         xContentBuilder.field("job_id", jobId);
         xContentBuilder.array("indexes", "airline-data");
-        xContentBuilder.array("types", "_doc");
         xContentBuilder.endObject();
         Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
         request.setJsonEntity(Strings.toString(xContentBuilder));

@@ -92,7 +92,6 @@ public class CategorizationIT extends MlNativeAutodetectIntegTestCase {
         String datafeedId = job.getId() + "-feed";
         DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId());
         datafeedConfig.setIndices(Collections.singletonList(DATA_INDEX));
-        datafeedConfig.setTypes(Collections.singletonList(DATA_TYPE));
         DatafeedConfig datafeed = datafeedConfig.build();
         registerDatafeed(datafeed);
         putDatafeed(datafeed);
@@ -138,7 +137,6 @@ public class CategorizationIT extends MlNativeAutodetectIntegTestCase {
         String datafeedId = job.getId() + "-feed";
         DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId());
         datafeedConfig.setIndices(Collections.singletonList(DATA_INDEX));
-        datafeedConfig.setTypes(Collections.singletonList(DATA_TYPE));
         DatafeedConfig datafeed = datafeedConfig.build();
         registerDatafeed(datafeed);
         putDatafeed(datafeed);

@@ -1177,7 +1177,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         Response build() throws IOException {
             Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
             request.setJsonEntity("{"
-                + "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"],\"types\":[\"" + type + "\"]"
+                + "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"]"
                + (source ? ",\"_source\":true" : "")
                + (scriptedFields == null ? "" : ",\"script_fields\":" + scriptedFields)
                + (aggregations == null ? "" : ",\"aggs\":" + aggregations)

@@ -49,7 +49,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
 public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
 
     private static final String DATA_INDEX = "delete-expired-data-test-data";
-    private static final String DATA_TYPE = "my_type";
+    private static final String DATA_TYPE = "doc";
 
     @Before
     public void setUpData() throws IOException {
@@ -111,7 +111,6 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
         String datafeedId = job.getId() + "-feed";
         DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId());
         datafeedConfig.setIndices(Arrays.asList(DATA_INDEX));
-        datafeedConfig.setTypes(Arrays.asList(DATA_TYPE));
         DatafeedConfig datafeed = datafeedConfig.build();
         registerDatafeed(datafeed);
         putDatafeed(datafeed);

@@ -109,7 +109,7 @@ public class InterimResultsDeletedAfterReopeningJobIT extends MlNativeAutodetect
 
     private void assertNoInterimResults(String jobId) {
         String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);
-        SearchResponse search = client().prepareSearch(indexName).setTypes("result").setSize(1000)
+        SearchResponse search = client().prepareSearch(indexName).setSize(1000)
             .setQuery(QueryBuilders.termQuery("is_interim", true)).get();
         assertThat(search.getHits().getTotalHits().value, equalTo(0L));
     }

@@ -24,11 +24,8 @@ class AggregationDataExtractor extends AbstractAggregationDataExtractor<SearchRe
 
     @Override
     protected SearchRequestBuilder buildSearchRequest(SearchSourceBuilder searchSourceBuilder) {
-
         return new SearchRequestBuilder(client, SearchAction.INSTANCE)
                 .setSource(searchSourceBuilder)
-                .setIndices(context.indices)
-                .setTypes(context.types);
+                .setIndices(context.indices);
     }
-
 }
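
From here down, the datafeed extractor contexts drop their types array and the search requests set only indices. The surviving conversion is the usual list-to-array idiom; a tiny sketch of what each context constructor keeps doing:

    import java.util.List;

    class ContextSketch {
        final String[] indices;

        ContextSketch(List<String> indices) {
            // Same idiom the contexts below use; new String[0] works just as well.
            this.indices = indices.toArray(new String[indices.size()]);
        }
    }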

@@ -19,7 +19,6 @@ class AggregationDataExtractorContext {
     final String timeField;
     final Set<String> fields;
     final String[] indices;
-    final String[] types;
     final QueryBuilder query;
     final AggregatorFactories.Builder aggs;
     final long start;
@@ -27,14 +26,13 @@ class AggregationDataExtractorContext {
     final boolean includeDocCount;
     final Map<String, String> headers;
 
-    AggregationDataExtractorContext(String jobId, String timeField, Set<String> fields, List<String> indices, List<String> types,
-                                    QueryBuilder query, AggregatorFactories.Builder aggs, long start, long end, boolean includeDocCount,
+    AggregationDataExtractorContext(String jobId, String timeField, Set<String> fields, List<String> indices, QueryBuilder query,
+                                    AggregatorFactories.Builder aggs, long start, long end, boolean includeDocCount,
                                     Map<String, String> headers) {
         this.jobId = Objects.requireNonNull(jobId);
         this.timeField = Objects.requireNonNull(timeField);
         this.fields = Objects.requireNonNull(fields);
         this.indices = indices.toArray(new String[indices.size()]);
-        this.types = types.toArray(new String[types.size()]);
         this.query = Objects.requireNonNull(query);
         this.aggs = Objects.requireNonNull(aggs);
         this.start = start;

@@ -34,7 +34,6 @@ public class AggregationDataExtractorFactory implements DataExtractorFactory {
                 job.getDataDescription().getTimeField(),
                 job.getAnalysisConfig().analysisFields(),
                 datafeedConfig.getIndices(),
-                datafeedConfig.getTypes(),
                 datafeedConfig.getParsedQuery(),
                 datafeedConfig.getParsedAggregations(),
                 Intervals.alignToCeil(start, histogramInterval),

@@ -56,7 +56,6 @@ public class RollupDataExtractorFactory implements DataExtractorFactory {
                 job.getDataDescription().getTimeField(),
                 job.getAnalysisConfig().analysisFields(),
                 datafeedConfig.getIndices(),
-                datafeedConfig.getTypes(),
                 datafeedConfig.getParsedQuery(),
                 datafeedConfig.getParsedAggregations(),
                 Intervals.alignToCeil(start, histogramInterval),

@@ -191,7 +191,7 @@ public class ChunkedDataExtractor implements DataExtractor {
     }
 
     private DataSummary newScrolledDataSummary() throws IOException {
-        SearchRequestBuilder searchRequestBuilder = rangeSearchRequest().setTypes(context.types);
+        SearchRequestBuilder searchRequestBuilder = rangeSearchRequest();
 
         SearchResponse response = executeSearchRequest(searchRequestBuilder);
         LOGGER.debug("[{}] Scrolling Data summary response was obtained", context.jobId);
@ -23,7 +23,6 @@ class ChunkedDataExtractorContext {
|
|||
final String jobId;
|
||||
final String timeField;
|
||||
final String[] indices;
|
||||
final String[] types;
|
||||
final QueryBuilder query;
|
||||
final int scrollSize;
|
||||
final long start;
|
||||
|
@@ -34,14 +33,12 @@ class ChunkedDataExtractorContext {
     final boolean hasAggregations;
     final Long histogramInterval;

-    ChunkedDataExtractorContext(String jobId, String timeField, List<String> indices, List<String> types,
-                                QueryBuilder query, int scrollSize, long start, long end, @Nullable TimeValue chunkSpan,
-                                TimeAligner timeAligner, Map<String, String> headers, boolean hasAggregations,
-                                @Nullable Long histogramInterval) {
+    ChunkedDataExtractorContext(String jobId, String timeField, List<String> indices, QueryBuilder query, int scrollSize, long start,
+                                long end, @Nullable TimeValue chunkSpan, TimeAligner timeAligner, Map<String, String> headers,
+                                boolean hasAggregations, @Nullable Long histogramInterval) {
         this.jobId = Objects.requireNonNull(jobId);
         this.timeField = Objects.requireNonNull(timeField);
         this.indices = indices.toArray(new String[indices.size()]);
-        this.types = types.toArray(new String[types.size()]);
         this.query = Objects.requireNonNull(query);
         this.scrollSize = scrollSize;
         this.start = start;
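The chunked context follows the same shape; a sketch with assumed, illustrative values only, mirroring the updated tests further down (the null chunk span and null histogram interval here are just placeholder choices):

```java
import java.util.Arrays;
import java.util.Collections;

import org.elasticsearch.index.query.QueryBuilders;

class ChunkedContextSketch {
    // Same package as ChunkedDataExtractorContext; every literal is illustrative.
    static ChunkedDataExtractorContext newContext() {
        return new ChunkedDataExtractorContext(
                "my-job", "time", Arrays.asList("index-1", "index-2"),
                QueryBuilders.matchAllQuery(), 1000, 0L, 3_600_000L,
                null,                                               // chunkSpan
                ChunkedDataExtractorFactory.newIdentityTimeAligner(),
                Collections.emptyMap(), false, null);               // no aggs, no histogram interval
    }
}
```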
@@ -35,7 +35,6 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory {
                 job.getId(),
                 job.getDataDescription().getTimeField(),
                 datafeedConfig.getIndices(),
-                datafeedConfig.getTypes(),
                 datafeedConfig.getParsedQuery(),
                 datafeedConfig.getScrollSize(),
                 timeAligner.alignToCeil(start),

@@ -106,7 +106,6 @@ class ScrollDataExtractor implements DataExtractor {
                 .setScroll(SCROLL_TIMEOUT)
                 .addSort(context.extractedFields.timeField(), SortOrder.ASC)
                 .setIndices(context.indices)
-                .setTypes(context.types)
                 .setSize(context.scrollSize)
                 .setQuery(ExtractorUtils.wrapInTimeRangeQuery(
                         context.query, context.extractedFields.timeField(), start, context.end));
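The scroll path reduces to the familiar typeless chain; a minimal sketch assuming an initialized `Client` and illustrative index, field, and range values:

```java
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.SortOrder;

class ScrollSearchSketch {
    // Scrolled, time-sorted search scoped by indices only; all names and numbers illustrative.
    static SearchRequestBuilder newScrollSearch(Client client) {
        return client.prepareSearch("index-1", "index-2")
                .setScroll(TimeValue.timeValueMinutes(30))
                .addSort("time", SortOrder.ASC)
                .setSize(1000)
                .setQuery(QueryBuilders.rangeQuery("time").gte(0L).lt(3_600_000L));
    }
}
```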
@@ -18,7 +18,6 @@ class ScrollDataExtractorContext {
     final String jobId;
     final TimeBasedExtractedFields extractedFields;
     final String[] indices;
-    final String[] types;
     final QueryBuilder query;
     final List<SearchSourceBuilder.ScriptField> scriptFields;
     final int scrollSize;

@@ -26,13 +25,12 @@ class ScrollDataExtractorContext {
     final long end;
     final Map<String, String> headers;

-    ScrollDataExtractorContext(String jobId, TimeBasedExtractedFields extractedFields, List<String> indices, List<String> types,
-                               QueryBuilder query, List<SearchSourceBuilder.ScriptField> scriptFields, int scrollSize,
-                               long start, long end, Map<String, String> headers) {
+    ScrollDataExtractorContext(String jobId, TimeBasedExtractedFields extractedFields, List<String> indices, QueryBuilder query,
+                               List<SearchSourceBuilder.ScriptField> scriptFields, int scrollSize, long start, long end,
+                               Map<String, String> headers) {
         this.jobId = Objects.requireNonNull(jobId);
         this.extractedFields = Objects.requireNonNull(extractedFields);
         this.indices = indices.toArray(new String[indices.size()]);
-        this.types = types.toArray(new String[types.size()]);
         this.query = Objects.requireNonNull(query);
         this.scriptFields = Objects.requireNonNull(scriptFields);
         this.scrollSize = scrollSize;

@@ -43,7 +43,6 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory {
                 job.getId(),
                 extractedFields,
                 datafeedConfig.getIndices(),
-                datafeedConfig.getTypes(),
                 datafeedConfig.getParsedQuery(),
                 datafeedConfig.getScriptFields(),
                 datafeedConfig.getScrollSize(),

@@ -282,7 +282,6 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
         DatafeedConfig updatedDatafeed = updatedMetadata.getDatafeed(datafeedConfig1.getId());
         assertThat(updatedDatafeed.getJobId(), equalTo(datafeedConfig1.getJobId()));
         assertThat(updatedDatafeed.getIndices(), equalTo(datafeedConfig1.getIndices()));
-        assertThat(updatedDatafeed.getTypes(), equalTo(datafeedConfig1.getTypes()));
         assertThat(updatedDatafeed.getScrollSize(), equalTo(5000));
     }

@@ -229,7 +229,6 @@ public class DatafeedJobValidatorTests extends ESTestCase {
     private static DatafeedConfig.Builder createValidDatafeedConfig() {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-datafeed", "my-job");
         builder.setIndices(Collections.singletonList("myIndex"));
-        builder.setTypes(Collections.singletonList("myType"));
         return builder;
     }
 }
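The same minimal, typeless config shows up throughout the test helpers; as a standalone sketch (the import path is an assumption about the x-pack core module, and all ids are illustrative):

```java
import java.util.Collections;

import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;

class DatafeedConfigSketch {
    // A datafeed now needs only a job id and indices; there is no setTypes(...) step.
    static DatafeedConfig newConfig() {
        DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-datafeed", "my-job");
        builder.setIndices(Collections.singletonList("my-index"));
        return builder.build();
    }
}
```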
@@ -340,7 +340,6 @@ public class DatafeedManagerTests extends ESTestCase {
     public static DatafeedConfig.Builder createDatafeedConfig(String datafeedId, String jobId) {
         DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId);
         datafeedConfig.setIndices(Collections.singletonList("myIndex"));
-        datafeedConfig.setTypes(Collections.singletonList("myType"));
         return datafeedConfig;
     }

@@ -89,7 +89,6 @@ public class DelayedDataDetectorFactoryTests extends ESTestCase {
     private DatafeedConfig createDatafeed(boolean shouldDetectDelayedData, TimeValue delayedDatacheckWindow) {
         DatafeedConfig.Builder builder = new DatafeedConfig.Builder("id", "jobId");
         builder.setIndices(Collections.singletonList("index1"));
-        builder.setTypes(Collections.singletonList("doc"));

         if (shouldDetectDelayedData) {
             builder.setDelayedDataCheckConfig(DelayedDataCheckConfig.enabledDelayedDataCheckConfig(delayedDatacheckWindow));

@@ -51,7 +51,6 @@ public class AggregationDataExtractorTests extends ESTestCase {
     private String jobId;
     private String timeField;
     private Set<String> fields;
-    private List<String> types;
     private List<String> indices;
     private QueryBuilder query;
     private AggregatorFactories.Builder aggs;
@@ -84,7 +83,6 @@ public class AggregationDataExtractorTests extends ESTestCase {
         fields = new HashSet<>();
         fields.addAll(Arrays.asList("time", "airline", "responsetime"));
         indices = Arrays.asList("index-1", "index-2");
-        types = Arrays.asList("type-1", "type-2");
         query = QueryBuilders.matchAllQuery();
         aggs = new AggregatorFactories.Builder()
                 .addAggregator(AggregationBuilders.histogram("time").field("time").interval(1000).subAggregation(

@@ -267,7 +265,7 @@ public class AggregationDataExtractorTests extends ESTestCase {
     }

     private AggregationDataExtractorContext createContext(long start, long end) {
-        return new AggregationDataExtractorContext(jobId, timeField, fields, indices, types, query, aggs, start, end, true,
+        return new AggregationDataExtractorContext(jobId, timeField, fields, indices, query, aggs, start, end, true,
                 Collections.emptyMap());
     }

@@ -49,7 +49,6 @@ public class ChunkedDataExtractorTests extends ESTestCase {
     private List<SearchRequest> capturedSearchRequests;
     private String jobId;
     private String timeField;
-    private List<String> types;
     private List<String> indices;
     private QueryBuilder query;
     private int scrollSize;
@@ -86,7 +85,6 @@ public class ChunkedDataExtractorTests extends ESTestCase {
         jobId = "test-job";
         timeField = "time";
         indices = Arrays.asList("index-1", "index-2");
-        types = Arrays.asList("type-1", "type-2");
         query = QueryBuilders.matchAllQuery();
         scrollSize = 1000;
         chunkSpan = null;

@@ -561,7 +559,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
     }

     private ChunkedDataExtractorContext createContext(long start, long end, boolean hasAggregations, Long histogramInterval) {
-        return new ChunkedDataExtractorContext(jobId, timeField, indices, types, query, scrollSize, start, end, chunkSpan,
+        return new ChunkedDataExtractorContext(jobId, timeField, indices, query, scrollSize, start, end, chunkSpan,
                 ChunkedDataExtractorFactory.newIdentityTimeAligner(), Collections.emptyMap(), hasAggregations, histogramInterval);
     }
@@ -120,7 +120,6 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase {

         DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId());
         datafeedBuilder.setIndices(Collections.singletonList("foo"));
-        datafeedBuilder.setTypes(Collections.singletonList("doc"));
         datafeedBuilder.setScriptFields(Collections.singletonList(new SearchSourceBuilder.ScriptField("airport", null, false)));

         Map<String, FieldCapabilities> timeCaps = new HashMap<>();

@@ -200,7 +199,6 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase {

         DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId());
         datafeedBuilder.setIndices(Collections.singletonList("foo"));
-        datafeedBuilder.setTypes(Collections.singletonList("doc"));

         Map<String, FieldCapabilities> timeCaps = new HashMap<>();
         timeCaps.put("date", createFieldCaps(false));

@@ -220,7 +218,6 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase {

         DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId());
         datafeedBuilder.setIndices(Collections.singletonList("foo"));
-        datafeedBuilder.setTypes(Collections.singletonList("doc"));

         Map<String, FieldCapabilities> timeCaps = new HashMap<>();
         timeCaps.put("date", createFieldCaps(true));

@@ -241,7 +238,6 @@ public class TimeBasedExtractedFieldsTests extends ESTestCase {

         DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("feed", jobBuilder.getId());
         datafeedBuilder.setIndices(Collections.singletonList("foo"));
-        datafeedBuilder.setTypes(Collections.singletonList("doc"));

         Map<String, FieldCapabilities> timeCaps = new HashMap<>();
         timeCaps.put("date", createFieldCaps(true));
@@ -65,7 +65,6 @@ public class ScrollDataExtractorTests extends ESTestCase {
     private ArgumentCaptor<ClearScrollRequest> capturedClearScrollRequests;
     private String jobId;
     private TimeBasedExtractedFields extractedFields;
-    private List<String> types;
     private List<String> indices;
     private QueryBuilder query;
     private List<SearchSourceBuilder.ScriptField> scriptFields;

@@ -134,7 +133,6 @@ public class ScrollDataExtractorTests extends ESTestCase {
         extractedFields = new TimeBasedExtractedFields(timeField,
                 Arrays.asList(timeField, ExtractedField.newField("field_1", ExtractedField.ExtractionMethod.DOC_VALUE)));
         indices = Arrays.asList("index-1", "index-2");
-        types = Arrays.asList("type-1", "type-2");
         query = QueryBuilders.matchAllQuery();
         scriptFields = Collections.emptyList();
         scrollSize = 1000;

@@ -422,7 +420,7 @@ public class ScrollDataExtractorTests extends ESTestCase {

         List<SearchSourceBuilder.ScriptField> sFields = Arrays.asList(withoutSplit, withSplit);
         ScrollDataExtractorContext context = new ScrollDataExtractorContext(jobId, extractedFields, indices,
-                types, query, sFields, scrollSize, 1000, 2000, Collections.emptyMap());
+                query, sFields, scrollSize, 1000, 2000, Collections.emptyMap());

         TestDataExtractor extractor = new TestDataExtractor(context);

@@ -467,7 +465,7 @@ public class ScrollDataExtractorTests extends ESTestCase {
     }

     private ScrollDataExtractorContext createContext(long start, long end) {
-        return new ScrollDataExtractorContext(jobId, extractedFields, indices, types, query, scriptFields, scrollSize, start, end,
+        return new ScrollDataExtractorContext(jobId, extractedFields, indices, query, scriptFields, scrollSize, start, end,
                 Collections.emptyMap());
     }
@@ -199,7 +199,6 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase {
         builder.setQueryDelay(TimeValue.timeValueSeconds(1));
         builder.setFrequency(TimeValue.timeValueSeconds(1));
         builder.setIndices(indices);
-        builder.setTypes(Collections.singletonList("type"));
         return builder;
     }
@@ -80,8 +80,7 @@ setup:
           body: >
             {
               "job_id":"a-missing-job",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }

 ---

@@ -105,13 +104,11 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-1",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }
   - match: { datafeed_id: "test-datafeed-1" }
   - match: { job_id: "datafeeds-crud-1" }
   - match: { indices: ["index-foo"] }
-  - match: { types: ["type-bar"] }
   - match: { scroll_size: 1000 }
   - is_true: query.match_all
   - match: { chunking_config: { mode: "auto" }}
@@ -124,8 +121,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-1",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }
   - match: { datafeed_id: "test-datafeed-1" }

@@ -136,8 +132,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-2",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }

 ---

@@ -148,8 +143,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-1",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }
   - match: { datafeed_id: "test-datafeed-1" }

@@ -160,8 +154,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-1",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }

 ---
@@ -174,7 +167,6 @@ setup:
             {
               "job_id":"datafeeds-crud-1",
               "indexes":["index-foo"],
-              "types":["type-bar"],
               "query":{"match_all_misspelled":{}}
             }

@@ -187,7 +179,6 @@ setup:
             {
               "job_id":"datafeeds-crud-1",
               "indexes":["index-foo"],
-              "types":["type-bar"],
               "scroll_size": 2000,
               "frequency": "1m",
               "query_delay": "30s"

@@ -206,7 +197,6 @@ setup:
   - match: { datafeed_id: "test-datafeed-1" }
   - match: { job_id: "datafeeds-crud-1" }
   - match: { indices: ["index-*"] }
-  - match: { types: ["type-bar"] }
   - match: { scroll_size: 10000 }
   - match: { frequency: "2m" }
   - match: { query_delay: "0s" }

@@ -220,7 +210,6 @@ setup:
             {
               "job_id":"datafeeds-crud-1",
               "indexes":["index-foo"],
-              "types":["type-bar"],
               "scroll_size": 2000
             }

@@ -234,7 +223,6 @@ setup:
   - match: { datafeed_id: "test-datafeed-1" }
   - match: { job_id: "datafeeds-crud-2" }
   - match: { indices: ["index-foo"] }
-  - match: { types: ["type-bar"] }

 ---
 "Test update datafeed with missing id":
@@ -255,7 +243,6 @@ setup:
             {
               "job_id":"datafeeds-crud-1",
               "indexes":["index-foo"],
-              "types":["type-bar"],
               "scroll_size": 2000
             }

@@ -276,8 +263,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-1",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }

   - do:

@@ -286,8 +272,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-2",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }

   - do:

@@ -315,7 +300,6 @@ setup:
             {
               "job_id":"datafeeds-crud-1",
               "indices":["index-foo"],
-              "types":["type-bar"],
               "chunking_config": {"mode":"manual","time_span": "1h"}
             }
   - match: { datafeed_id: "test-datafeed-1" }
@@ -331,7 +315,6 @@ setup:
             {
               "job_id":"datafeeds-crud-1",
               "indices":["index-foo"],
-              "types":["type-bar"],
               "aggs": {
                 "histogram_buckets":{
                   "date_histogram": {

@@ -385,8 +368,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-1",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }
   - match: { datafeed_id: "test-datafeed-1" }

@@ -403,8 +385,7 @@ setup:
           body: >
             {
               "job_id":"datafeeds-crud-1",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }
   - match: { datafeed_id: "test-datafeed-1" }
@@ -65,8 +65,7 @@ setup:
           body: >
             {
               "job_id":"force-delete-job",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }
   - match: { datafeed_id: force-delete-job-datafeed }

@@ -81,8 +81,7 @@ setup:
           body: >
             {
               "job_id":"get-datafeed-stats-1",
-              "indexes":["index-1"],
-              "types":["type-1"]
+              "indexes":["index-1"]
             }

   - do:

@@ -93,8 +92,7 @@ setup:
           body: >
             {
               "job_id":"get-datafeed-stats-2",
-              "indexes":["index-2"],
-              "types":["type-2"]
+              "indexes":["index-2"]
             }

 ---
@@ -45,8 +45,7 @@ setup:
           body: >
             {
               "job_id":"get-datafeed-1",
-              "indexes":["index-1"],
-              "types":["type-1"]
+              "indexes":["index-1"]
             }

   - do:

@@ -57,8 +56,7 @@ setup:
           body: >
             {
               "job_id":"get-datafeed-2",
-              "indexes":["index-2"],
-              "types":["type-2"]
+              "indexes":["index-2"]
             }

 ---
@@ -516,8 +516,7 @@
           body: >
             {
               "job_id":"jobs-crud-datafeed-job",
-              "indexes":["index-foo"],
-              "types":["type-bar"]
+              "indexes":["index-foo"]
             }
   - match: { datafeed_id: "jobs-crud-test-datafeed-1" }

@@ -66,8 +66,7 @@ setup:
           body: >
             {
               "job_id":"jobs-get-stats-datafeed-job",
-              "indexes":["farequote"],
-              "types":["response"]
+              "indexes":["farequote"]
             }

 ---
@@ -94,8 +94,7 @@ setup:
           body: >
             {
               "job_id":"preview-datafeed-job",
-              "indexes":"airline-data",
-              "types":"response"
+              "indexes":"airline-data"
             }

   - do:

@@ -140,7 +139,6 @@ setup:
             {
               "job_id":"aggregation-doc-count-job",
               "indexes":"airline-data",
-              "types":"response",
               "aggregations": {
                 "buckets": {
                   "histogram": {

@@ -213,7 +211,6 @@ setup:
             {
               "job_id":"aggregation-custom-single-metric-summary-job",
               "indexes":"airline-data",
-              "types":"response",
               "aggregations": {
                 "buckets": {
                   "histogram": {

@@ -272,7 +269,6 @@ setup:
             {
               "job_id":"aggregation-custom-multi-metric-summary-job",
               "indexes":"airline-data",
-              "types":"response",
               "aggregations": {
                 "buckets": {
                   "histogram": {
@@ -358,8 +354,7 @@ setup:
           body: >
             {
               "job_id":"unavailable-job",
-              "indexes":"unavailable-data",
-              "types":"response"
+              "indexes":"unavailable-data"
             }

   - do:

@@ -391,7 +386,6 @@ setup:
             {
               "job_id":"empty-job",
               "indexes":"airline-data",
-              "types":"response",
               "query": {
                 "term": {"airline":"missing"}
               }
@@ -48,8 +48,7 @@ setup:
           body: >
             {
               "job_id":"start-stop-datafeed-job",
-              "indexes":"airline-data",
-              "types":"response"
+              "indexes":"airline-data"
             }

 ---

@@ -222,8 +221,7 @@ setup:
           body: >
             {
               "job_id":"start-stop-datafeed-job-field-without-mappings",
-              "indexes":"airline-data",
-              "types":"response"
+              "indexes":"airline-data"
             }

   - do:

@@ -330,8 +328,7 @@ setup:
           body: >
             {
               "job_id":"start-stop-datafeed-job-foo-1",
-              "indexes":"airline-data",
-              "types":"response"
+              "indexes":"airline-data"
             }

   - do:

@@ -340,8 +337,7 @@ setup:
           body: >
             {
               "job_id":"start-stop-datafeed-job-foo-2",
-              "indexes":"airline-data",
-              "types":"response"
+              "indexes":"airline-data"
             }

   - do:

@@ -350,8 +346,7 @@ setup:
           body: >
             {
               "job_id":"start-stop-datafeed-job-bar-1",
-              "indexes":"airline-data",
-              "types":"response"
+              "indexes":"airline-data"
             }

   - do:
@@ -5,7 +5,6 @@
       datafeed_id: old-cluster-datafeed
   - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"}
   - length: { datafeeds.0.indices: 1 }
-  - length: { datafeeds.0.types: 1 }
   - gte: { datafeeds.0.scroll_size: 2000 }

   - do:

@@ -43,7 +42,6 @@
             {
               "job_id":"mixed-cluster-datafeed-job",
               "indices":["airline-data"],
-              "types":["response"],
               "scroll_size": 2000
             }

@@ -23,7 +23,6 @@ setup:
       datafeed_id: old-cluster-datafeed
   - match: { datafeeds.0.datafeed_id: "old-cluster-datafeed"}
   - length: { datafeeds.0.indices: 1 }
-  - length: { datafeeds.0.types: 1 }
   - gte: { datafeeds.0.scroll_size: 2000 }

   - do:

@@ -37,7 +36,6 @@ setup:
       datafeed_id: mixed-cluster-datafeed
   - match: { datafeeds.0.datafeed_id: "mixed-cluster-datafeed"}
   - length: { datafeeds.0.indices: 1 }
-  - length: { datafeeds.0.types: 1 }
   - gte: { datafeeds.0.scroll_size: 2000 }

   - do:
@@ -137,7 +137,6 @@ public class MLTransportClientIT extends ESXPackSmokeClientTestCase {
         String datafeedIndex = "ml-transport-client-test";
-        String datatype = "type-bar";
         datafeed.setIndices(Collections.singletonList(datafeedIndex));
-        datafeed.setTypes(Collections.singletonList("type-bar"));

         mlClient.putDatafeed(new PutDatafeedAction.Request(datafeed.build())).actionGet();
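End to end, creating a typeless datafeed boils down to this; a sketch whose import paths and surrounding client wiring are assumptions, while the builder and request calls are the ones used in the test above:

```java
import java.util.Collections;

import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;

class PutDatafeedSketch {
    // Builds the put-datafeed request; executing it still requires an ML client,
    // e.g. mlClient.putDatafeed(request).actionGet() as in the test above.
    static PutDatafeedAction.Request newPutRequest() {
        DatafeedConfig.Builder datafeed = new DatafeedConfig.Builder("my-datafeed", "my-job");
        datafeed.setIndices(Collections.singletonList("my-index")); // indices only, no types
        return new PutDatafeedAction.Request(datafeed.build());
    }
}
```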
|
Loading…
Reference in New Issue