[ML] Rename datafeed indexes to indices (elastic/x-pack-elasticsearch#1297)

Renames the datafeed configuration field indexes to indices, but keeps
indexes as a synonym while parsing.

Relates elastic/x-pack-elasticsearch#1292

Original commit: elastic/x-pack-elasticsearch@1fcdd97f88
Dimitris Athanasiou 2017-05-04 11:37:12 +01:00 committed by GitHub
parent b922b37c3f
commit 7f2c7dbe17
31 changed files with 130 additions and 128 deletions
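
Note on compatibility: as the DatafeedConfig and DatafeedUpdate parser hunks below show, both the old indexes field and the new indices field are declared against the same setIndices setter, so either spelling is accepted on input, while output always uses the preferred name indices. A minimal, self-contained sketch of that synonym pattern (plain-Java stand-ins for the x-pack ParseField machinery, names hypothetical):

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class IndicesSynonymSketch {

    // Accept either key, mirroring the two declareStringArray calls in the diff below.
    static List<String> readIndices(Map<String, Object> body) {
        Object value = body.containsKey("indices") ? body.get("indices") : body.get("indexes");
        if (value == null) {
            throw new IllegalArgumentException("one of [indices] or [indexes] is required");
        }
        @SuppressWarnings("unchecked")
        List<String> indices = (List<String>) value;
        return indices;
    }

    public static void main(String[] args) {
        // Both payloads yield the same configuration value.
        System.out.println(readIndices(Map.of("indices", Arrays.asList("it_ops_metrics"))));
        System.out.println(readIndices(Map.of("indexes", Arrays.asList("it_ops_metrics"))));
    }
}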

View File

@@ -30,7 +30,7 @@ A {dfeed} resource has the following properties:
bucket spans, or, for longer bucket spans, a sensible fraction of the bucket
span. For example: "150s"
`indexes` (required)::
`indices` (required)::
(array) An array of index names. For example: ["it_ops_metrics"]
`job_id` (required)::

View File

@@ -38,7 +38,7 @@ You must create a job before you create a {dfeed}. You can associate only one
bucket spans, or, for longer bucket spans, a sensible fraction of the bucket
span. For example: "150s".
`indexes` (required)::
`indices` (required)::
(array) An array of index names. Wildcards are supported. For example:
["it_ops_metrics", "server*"].
@@ -83,7 +83,7 @@ The following example creates the `datafeed-it-ops-kpi` {dfeed}:
PUT _xpack/ml/datafeeds/datafeed-it-ops-kpi
{
"job_id": "it-ops-kpi",
"indexes": ["it_ops_metrics"],
"indices": ["it_ops_metrics"],
"types": ["kpi","network","sql"],
"query": {
"match_all": {
@@ -102,7 +102,7 @@ When the {dfeed} is created, you receive the following results:
"datafeed_id": "datafeed-it-ops-kpi",
"job_id": "it-ops-kpi",
"query_delay": "1m",
"indexes": [
"indices": [
"it_ops_metrics"
],
"types": [

View File

@@ -33,7 +33,7 @@ The following properties can be updated after the {dfeed} is created:
bucket spans, or, for longer bucket spans, a sensible fraction of the bucket
span. For example: "150s".
`indexes`::
`indices`::
(array) An array of index names. Wildcards are supported. For example:
["it_ops_metrics", "server*"].
@@ -97,7 +97,7 @@ with the updated values:
"datafeed_id": "datafeed-it-ops-kpi",
"job_id": "it-ops-kpi",
"query_delay": "1m",
"indexes": ["it-ops"],
"indices": ["it-ops"],
"types": ["logs"],
"query": {
"term": {

View File

@@ -233,8 +233,7 @@ public class PutDatafeedAction extends Action<PutDatafeedAction.Request, PutData
// We just check for permission to use the search action. In reality we'll also
// use the scroll action, but that's considered an implementation detail.
privRequest.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder()
.indices(request.getDatafeed().getIndexes()
.toArray(new String[0]))
.indices(request.getDatafeed().getIndices().toArray(new String[0]))
.privileges(SearchAction.NAME)
.build());
@@ -259,7 +258,7 @@ public class PutDatafeedAction extends Action<PutDatafeedAction.Request, PutData
builder.endObject();
listener.onFailure(Exceptions.authorizationError("Cannot create datafeed [{}]" +
" because user {} lacks permissions on the indexes to be" +
" because user {} lacks permissions on the indices to be" +
" searched: {}",
request.getDatafeed().getId(), username, builder.string()));
}
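
The two hunks above only reword the authorization error; the gate itself is unchanged: datafeed creation requires search privileges on every index the datafeed will read. A hedged, self-contained sketch of that check (plain Java, not the x-pack security API):

import java.util.List;
import java.util.function.BiPredicate;
import java.util.stream.Collectors;

public class DatafeedPrivilegeSketch {

    static void checkCanCreateDatafeed(String datafeedId, String username, List<String> indices,
                                       BiPredicate<String, String> hasSearchPrivilege) {
        // Collect every index the user may not search.
        List<String> denied = indices.stream()
                .filter(index -> hasSearchPrivilege.test(username, index) == false)
                .collect(Collectors.toList());
        if (denied.isEmpty() == false) {
            throw new SecurityException("Cannot create datafeed [" + datafeedId + "] because user "
                    + username + " lacks permissions on the indices to be searched: " + denied);
        }
    }

    public static void main(String[] args) {
        // Throws: the predicate denies everything, as in the DatafeedJobsRestIT case below.
        checkCanCreateDatafeed("datafeed-it-ops-kpi", "ml_admin",
                List.of("it_ops_metrics"), (user, index) -> false);
    }
}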

View File

@@ -602,7 +602,7 @@ public class StartDatafeedAction
private static String verifyIndicesActive(Logger logger, DatafeedConfig datafeed, ClusterState clusterState,
IndexNameExpressionResolver resolver) {
List<String> indices = datafeed.getIndexes();
List<String> indices = datafeed.getIndices();
for (String index : indices) {
String[] concreteIndices;
String reason = "cannot start datafeed [" + datafeed.getId() + "] because index ["
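
verifyIndicesActive resolves each configured name to concrete indices before the datafeed starts; per the docs hunks above, wildcard patterns such as "server*" are allowed. A rough sketch of that expansion (a hand-rolled stand-in for IndexNameExpressionResolver; only trailing-* patterns are handled here):

import java.util.List;
import java.util.stream.Collectors;

public class IndexResolutionSketch {

    // Expand one index expression against the cluster's concrete index names.
    static List<String> resolve(String expression, List<String> concreteIndices) {
        if (expression.endsWith("*")) {
            String prefix = expression.substring(0, expression.length() - 1);
            return concreteIndices.stream()
                    .filter(name -> name.startsWith(prefix))
                    .collect(Collectors.toList());
        }
        return concreteIndices.stream().filter(expression::equals).collect(Collectors.toList());
    }

    public static void main(String[] args) {
        // Prints [server-1, server-2]
        System.out.println(resolve("server*", List.of("server-1", "server-2", "it_ops_metrics")));
    }
}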

View File

@@ -63,6 +63,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
public static final ParseField QUERY_DELAY = new ParseField("query_delay");
public static final ParseField FREQUENCY = new ParseField("frequency");
public static final ParseField INDEXES = new ParseField("indexes");
public static final ParseField INDICES = new ParseField("indices");
public static final ParseField TYPES = new ParseField("types");
public static final ParseField QUERY = new ParseField("query");
public static final ParseField SCROLL_SIZE = new ParseField("scroll_size");
@@ -77,7 +78,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
static {
PARSER.declareString(Builder::setId, ID);
PARSER.declareString(Builder::setJobId, Job.ID);
PARSER.declareStringArray(Builder::setIndexes, INDEXES);
PARSER.declareStringArray(Builder::setIndices, INDEXES);
PARSER.declareStringArray(Builder::setIndices, INDICES);
PARSER.declareStringArray(Builder::setTypes, TYPES);
PARSER.declareString((builder, val) ->
builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY);
@@ -114,7 +116,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
*/
private final TimeValue frequency;
private final List<String> indexes;
private final List<String> indices;
private final List<String> types;
private final QueryBuilder query;
private final AggregatorFactories.Builder aggregations;
@@ -123,14 +125,14 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
private final boolean source;
private final ChunkingConfig chunkingConfig;
private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indexes, List<String> types,
private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
QueryBuilder query, AggregatorFactories.Builder aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
Integer scrollSize, boolean source, ChunkingConfig chunkingConfig) {
this.id = id;
this.jobId = jobId;
this.queryDelay = queryDelay;
this.frequency = frequency;
this.indexes = indexes;
this.indices = indices;
this.types = types;
this.query = query;
this.aggregations = aggregations;
@@ -146,9 +148,9 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.queryDelay = in.readOptionalWriteable(TimeValue::new);
this.frequency = in.readOptionalWriteable(TimeValue::new);
if (in.readBoolean()) {
this.indexes = in.readList(StreamInput::readString);
this.indices = in.readList(StreamInput::readString);
} else {
this.indexes = null;
this.indices = null;
}
if (in.readBoolean()) {
this.types = in.readList(StreamInput::readString);
@@ -183,8 +185,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
return frequency;
}
public List<String> getIndexes() {
return indexes;
public List<String> getIndices() {
return indices;
}
public List<String> getTypes() {
@@ -315,9 +317,9 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
out.writeString(jobId);
out.writeOptionalWriteable(queryDelay);
out.writeOptionalWriteable(frequency);
if (indexes != null) {
if (indices != null) {
out.writeBoolean(true);
out.writeStringList(indexes);
out.writeStringList(indices);
} else {
out.writeBoolean(false);
}
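
The read and write hunks above keep the optional-list wire pattern intact under the rename: a boolean presence flag, then the list itself when present. A self-contained sketch of the same round trip, with java.io streams standing in for Elasticsearch's StreamOutput/StreamInput:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class OptionalListWireSketch {

    static void writeOptionalStringList(DataOutputStream out, List<String> list) throws IOException {
        if (list != null) {
            out.writeBoolean(true); // presence flag
            out.writeInt(list.size());
            for (String value : list) {
                out.writeUTF(value);
            }
        } else {
            out.writeBoolean(false);
        }
    }

    static List<String> readOptionalStringList(DataInputStream in) throws IOException {
        if (in.readBoolean() == false) {
            return null; // field was absent
        }
        int size = in.readInt();
        List<String> list = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            list.add(in.readUTF());
        }
        return list;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeOptionalStringList(new DataOutputStream(bytes), List.of("index-1", "index-2"));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(readOptionalStringList(in)); // [index-1, index-2]
    }
}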
@@ -355,7 +357,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
if (frequency != null) {
builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep());
}
builder.field(INDEXES.getPreferredName(), indexes);
builder.field(INDICES.getPreferredName(), indices);
builder.field(TYPES.getPreferredName(), types);
builder.field(QUERY.getPreferredName(), query);
if (aggregations != null) {
@@ -379,8 +381,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
}
/**
* The lists of indexes and types are compared for equality but they are not
* sorted first so this test could fail simply because the indexes and types
* The lists of indices and types are compared for equality but they are not
* sorted first so this test could fail simply because the indices and types
* lists are in different orders.
*/
@Override
@@ -399,7 +401,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
&& Objects.equals(this.jobId, that.jobId)
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.queryDelay, that.queryDelay)
&& Objects.equals(this.indexes, that.indexes)
&& Objects.equals(this.indices, that.indices)
&& Objects.equals(this.types, that.types)
&& Objects.equals(this.query, that.query)
&& Objects.equals(this.scrollSize, that.scrollSize)
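
The Javadoc caveat above is plain List.equals semantics: element order matters, so configs listing the same indices in different orders compare unequal. A one-line illustration:

import java.util.Arrays;

public class ListOrderSketch {
    public static void main(String[] args) {
        // Prints false: same elements, different order.
        System.out.println(Arrays.asList("index-1", "index-2").equals(Arrays.asList("index-2", "index-1")));
    }
}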
@@ -411,7 +413,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
@Override
public int hashCode() {
return Objects.hash(id, jobId, frequency, queryDelay, indexes, types, query, scrollSize, aggregations, scriptFields, source,
return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields, source,
chunkingConfig);
}
@@ -430,7 +432,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
private String jobId;
private TimeValue queryDelay = DEFAULT_QUERY_DELAY;
private TimeValue frequency;
private List<String> indexes = Collections.emptyList();
private List<String> indices = Collections.emptyList();
private List<String> types = Collections.emptyList();
private QueryBuilder query = QueryBuilders.matchAllQuery();
private AggregatorFactories.Builder aggregations;
@@ -453,7 +455,7 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.jobId = config.jobId;
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indexes = config.indexes;
this.indices = config.indices;
this.types = config.types;
this.query = config.query;
this.aggregations = config.aggregations;
@@ -471,8 +473,8 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
}
public void setIndexes(List<String> indexes) {
this.indexes = ExceptionsHelper.requireNonNull(indexes, INDEXES.getPreferredName());
public void setIndices(List<String> indices) {
this.indices = ExceptionsHelper.requireNonNull(indices, INDICES.getPreferredName());
}
public void setTypes(List<String> types) {
@@ -529,15 +531,15 @@ public class DatafeedConfig extends AbstractDiffable<DatafeedConfig> implements
if (!MlStrings.isValidId(id)) {
throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName()));
}
if (indexes == null || indexes.isEmpty() || indexes.contains(null) || indexes.contains("")) {
throw invalidOptionValue(INDEXES.getPreferredName(), indexes);
if (indices == null || indices.isEmpty() || indices.contains(null) || indices.contains("")) {
throw invalidOptionValue(INDICES.getPreferredName(), indices);
}
if (types == null || types.isEmpty() || types.contains(null) || types.contains("")) {
throw invalidOptionValue(TYPES.getPreferredName(), types);
}
validateAggregations();
setDefaultChunkingConfig();
return new DatafeedConfig(id, jobId, queryDelay, frequency, indexes, types, query, aggregations, scriptFields, scrollSize,
return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
source, chunkingConfig);
}
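
The build-time check above rejects a missing, empty, or partly blank indices list before any DatafeedConfig is constructed, which is what the DatafeedConfigTests hunks further down assert. A self-contained mirror of the check (the exception message is paraphrased, not the exact Messages template):

import java.util.Collections;
import java.util.List;

public class IndicesValidationSketch {

    static void validateIndices(List<String> indices) {
        if (indices == null || indices.isEmpty() || indices.contains(null) || indices.contains("")) {
            throw new IllegalArgumentException("Invalid indices value " + indices + " in datafeed configuration");
        }
    }

    public static void main(String[] args) {
        validateIndices(Collections.emptyList()); // throws IllegalArgumentException
    }
}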

View File

@@ -40,7 +40,8 @@ public class DatafeedUpdate implements Writeable, ToXContent {
static {
PARSER.declareString(Builder::setId, DatafeedConfig.ID);
PARSER.declareString(Builder::setJobId, Job.ID);
PARSER.declareStringArray(Builder::setIndexes, DatafeedConfig.INDEXES);
PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES);
PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES);
PARSER.declareStringArray(Builder::setTypes, DatafeedConfig.TYPES);
PARSER.declareString((builder, val) -> builder.setQueryDelay(
TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY);
@@ -69,7 +70,7 @@ public class DatafeedUpdate implements Writeable, ToXContent {
private final String jobId;
private final TimeValue queryDelay;
private final TimeValue frequency;
private final List<String> indexes;
private final List<String> indices;
private final List<String> types;
private final QueryBuilder query;
private final AggregatorFactories.Builder aggregations;
@@ -78,14 +79,14 @@ public class DatafeedUpdate implements Writeable, ToXContent {
private final Boolean source;
private final ChunkingConfig chunkingConfig;
private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indexes, List<String> types,
private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types,
QueryBuilder query, AggregatorFactories.Builder aggregations, List<SearchSourceBuilder.ScriptField> scriptFields,
Integer scrollSize, Boolean source, ChunkingConfig chunkingConfig) {
this.id = id;
this.jobId = jobId;
this.queryDelay = queryDelay;
this.frequency = frequency;
this.indexes = indexes;
this.indices = indices;
this.types = types;
this.query = query;
this.aggregations = aggregations;
@@ -101,9 +102,9 @@ public class DatafeedUpdate implements Writeable, ToXContent {
this.queryDelay = in.readOptionalWriteable(TimeValue::new);
this.frequency = in.readOptionalWriteable(TimeValue::new);
if (in.readBoolean()) {
this.indexes = in.readList(StreamInput::readString);
this.indices = in.readList(StreamInput::readString);
} else {
this.indexes = null;
this.indices = null;
}
if (in.readBoolean()) {
this.types = in.readList(StreamInput::readString);
@@ -135,9 +136,9 @@ public class DatafeedUpdate implements Writeable, ToXContent {
out.writeOptionalString(jobId);
out.writeOptionalWriteable(queryDelay);
out.writeOptionalWriteable(frequency);
if (indexes != null) {
if (indices != null) {
out.writeBoolean(true);
out.writeStringList(indexes);
out.writeStringList(indices);
} else {
out.writeBoolean(false);
}
@@ -171,7 +172,7 @@ public class DatafeedUpdate implements Writeable, ToXContent {
if (frequency != null) {
builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep());
}
addOptionalField(builder, DatafeedConfig.INDEXES, indexes);
addOptionalField(builder, DatafeedConfig.INDICES, indices);
addOptionalField(builder, DatafeedConfig.TYPES, types);
addOptionalField(builder, DatafeedConfig.QUERY, query);
addOptionalField(builder, DatafeedConfig.AGGREGATIONS, aggregations);
@@ -214,8 +215,8 @@ public class DatafeedUpdate implements Writeable, ToXContent {
if (frequency != null) {
builder.setFrequency(frequency);
}
if (indexes != null) {
builder.setIndexes(indexes);
if (indices != null) {
builder.setIndices(indices);
}
if (types != null) {
builder.setTypes(types);
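
These null checks are what make DatafeedUpdate a partial update: only fields explicitly set on the update overwrite the existing config, which the update docs and DatafeedUpdateTests below both exercise. A compact sketch of the merge rule:

import java.util.Arrays;
import java.util.List;

public class PartialUpdateSketch {

    // A field left unset (null) on the update keeps the current value.
    static <T> T merge(T updated, T current) {
        return updated != null ? updated : current;
    }

    public static void main(String[] args) {
        List<String> current = Arrays.asList("i_1");
        System.out.println(merge(Arrays.asList("i_2"), current)); // [i_2]
        System.out.println(merge((List<String>) null, current));  // [i_1]
    }
}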
@@ -242,8 +243,8 @@ public class DatafeedUpdate implements Writeable, ToXContent {
}
/**
* The lists of indexes and types are compared for equality but they are not
* sorted first so this test could fail simply because the indexes and types
* The lists of indices and types are compared for equality but they are not
* sorted first so this test could fail simply because the indices and types
* lists are in different orders.
*/
@Override
@@ -262,7 +263,7 @@ public class DatafeedUpdate implements Writeable, ToXContent {
&& Objects.equals(this.jobId, that.jobId)
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.queryDelay, that.queryDelay)
&& Objects.equals(this.indexes, that.indexes)
&& Objects.equals(this.indices, that.indices)
&& Objects.equals(this.types, that.types)
&& Objects.equals(this.query, that.query)
&& Objects.equals(this.scrollSize, that.scrollSize)
@@ -274,7 +275,7 @@ public class DatafeedUpdate implements Writeable, ToXContent {
@Override
public int hashCode() {
return Objects.hash(id, jobId, frequency, queryDelay, indexes, types, query, scrollSize, aggregations, scriptFields, source,
return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields, source,
chunkingConfig);
}
@@ -289,7 +290,7 @@ public class DatafeedUpdate implements Writeable, ToXContent {
private String jobId;
private TimeValue queryDelay;
private TimeValue frequency;
private List<String> indexes;
private List<String> indices;
private List<String> types;
private QueryBuilder query;
private AggregatorFactories.Builder aggregations;
@@ -310,7 +311,7 @@ public class DatafeedUpdate implements Writeable, ToXContent {
this.jobId = config.jobId;
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indexes = config.indexes;
this.indices = config.indices;
this.types = config.types;
this.query = config.query;
this.aggregations = config.aggregations;
@@ -328,8 +329,8 @@ public class DatafeedUpdate implements Writeable, ToXContent {
this.jobId = jobId;
}
public void setIndexes(List<String> indexes) {
this.indexes = indexes;
public void setIndices(List<String> indices) {
this.indices = indices;
}
public void setTypes(List<String> types) {
@@ -371,7 +372,7 @@ public class DatafeedUpdate implements Writeable, ToXContent {
}
public DatafeedUpdate build() {
return new DatafeedUpdate(id, jobId, queryDelay, frequency, indexes, types, query, aggregations, scriptFields, scrollSize,
return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize,
source, chunkingConfig);
}
}

View File

@@ -107,7 +107,7 @@ class AggregationDataExtractor implements DataExtractor {
private SearchRequestBuilder buildSearchRequest() {
SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
.setIndices(context.indexes)
.setIndices(context.indices)
.setTypes(context.types)
.setSize(0)
.setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, context.start, context.end));

View File

@@ -15,7 +15,7 @@ class AggregationDataExtractorContext {
final String jobId;
final String timeField;
final String[] indexes;
final String[] indices;
final String[] types;
final QueryBuilder query;
final AggregatorFactories.Builder aggs;
@@ -23,11 +23,11 @@ class AggregationDataExtractorContext {
final long end;
final boolean includeDocCount;
AggregationDataExtractorContext(String jobId, String timeField, List<String> indexes, List<String> types, QueryBuilder query,
AggregationDataExtractorContext(String jobId, String timeField, List<String> indices, List<String> types, QueryBuilder query,
AggregatorFactories.Builder aggs, long start, long end, boolean includeDocCount) {
this.jobId = Objects.requireNonNull(jobId);
this.timeField = Objects.requireNonNull(timeField);
this.indexes = indexes.toArray(new String[indexes.size()]);
this.indices = indices.toArray(new String[indices.size()]);
this.types = types.toArray(new String[types.size()]);
this.query = Objects.requireNonNull(query);
this.aggs = Objects.requireNonNull(aggs);

View File

@@ -30,7 +30,7 @@ public class AggregationDataExtractorFactory implements DataExtractorFactory {
AggregationDataExtractorContext dataExtractorContext = new AggregationDataExtractorContext(
job.getId(),
job.getDataDescription().getTimeField(),
datafeedConfig.getIndexes(),
datafeedConfig.getIndices(),
datafeedConfig.getTypes(),
datafeedConfig.getQuery(),
datafeedConfig.getAggregations(),

View File

@@ -108,7 +108,7 @@ public class ChunkedDataExtractor implements DataExtractor {
private DataSummary requestDataSummary() throws IOException {
SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
.setSize(0)
.setIndices(context.indexes)
.setIndices(context.indices)
.setTypes(context.types)
.setQuery(ExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, currentStart, context.end))
.addAggregation(AggregationBuilders.min(EARLIEST_TIME).field(context.timeField))

View File

@@ -16,7 +16,7 @@ class ChunkedDataExtractorContext {
final String jobId;
final String timeField;
final String[] indexes;
final String[] indices;
final String[] types;
final QueryBuilder query;
final int scrollSize;
@@ -24,11 +24,11 @@ class ChunkedDataExtractorContext {
final long end;
final TimeValue chunkSpan;
ChunkedDataExtractorContext(String jobId, String timeField, List<String> indexes, List<String> types,
ChunkedDataExtractorContext(String jobId, String timeField, List<String> indices, List<String> types,
QueryBuilder query, int scrollSize, long start, long end, @Nullable TimeValue chunkSpan) {
this.jobId = Objects.requireNonNull(jobId);
this.timeField = Objects.requireNonNull(timeField);
this.indexes = indexes.toArray(new String[indexes.size()]);
this.indices = indices.toArray(new String[indices.size()]);
this.types = types.toArray(new String[types.size()]);
this.query = Objects.requireNonNull(query);
this.scrollSize = scrollSize;

View File

@@ -32,7 +32,7 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory {
ChunkedDataExtractorContext dataExtractorContext = new ChunkedDataExtractorContext(
job.getId(),
job.getDataDescription().getTimeField(),
datafeedConfig.getIndexes(),
datafeedConfig.getIndices(),
datafeedConfig.getTypes(),
datafeedConfig.getQuery(),
datafeedConfig.getScrollSize(),

View File

@@ -99,7 +99,7 @@ class ScrollDataExtractor implements DataExtractor {
SearchRequestBuilder searchRequestBuilder = SearchAction.INSTANCE.newRequestBuilder(client)
.setScroll(SCROLL_TIMEOUT)
.addSort(context.extractedFields.timeField(), SortOrder.ASC)
.setIndices(context.indexes)
.setIndices(context.indices)
.setTypes(context.types)
.setSize(context.scrollSize)
.setQuery(ExtractorUtils.wrapInTimeRangeQuery(

View File

@@ -15,7 +15,7 @@ class ScrollDataExtractorContext {
final String jobId;
final ExtractedFields extractedFields;
final String[] indexes;
final String[] indices;
final String[] types;
final QueryBuilder query;
final List<SearchSourceBuilder.ScriptField> scriptFields;
@@ -23,12 +23,12 @@ class ScrollDataExtractorContext {
final long start;
final long end;
ScrollDataExtractorContext(String jobId, ExtractedFields extractedFields, List<String> indexes, List<String> types,
ScrollDataExtractorContext(String jobId, ExtractedFields extractedFields, List<String> indices, List<String> types,
QueryBuilder query, List<SearchSourceBuilder.ScriptField> scriptFields, int scrollSize,
long start, long end) {
this.jobId = Objects.requireNonNull(jobId);
this.extractedFields = Objects.requireNonNull(extractedFields);
this.indexes = indexes.toArray(new String[indexes.size()]);
this.indices = indices.toArray(new String[indices.size()]);
this.types = types.toArray(new String[types.size()]);
this.query = Objects.requireNonNull(query);
this.scriptFields = Objects.requireNonNull(scriptFields);

View File

@@ -32,7 +32,7 @@ public class ScrollDataExtractorFactory implements DataExtractorFactory {
ScrollDataExtractorContext dataExtractorContext = new ScrollDataExtractorContext(
job.getId(),
extractedFields,
datafeedConfig.getIndexes(),
datafeedConfig.getIndices(),
datafeedConfig.getTypes(),
datafeedConfig.getQuery(),
datafeedConfig.getScriptFields(),

View File

@@ -251,7 +251,7 @@ public class MlMetadataTests extends AbstractSerializingTestCase<MlMetadata> {
DatafeedConfig updatedDatafeed = updatedMetadata.getDatafeed(datafeedConfig1.getId());
assertThat(updatedDatafeed.getJobId(), equalTo(datafeedConfig1.getJobId()));
assertThat(updatedDatafeed.getIndexes(), equalTo(datafeedConfig1.getIndexes()));
assertThat(updatedDatafeed.getIndices(), equalTo(datafeedConfig1.getIndices()));
assertThat(updatedDatafeed.getTypes(), equalTo(datafeedConfig1.getTypes()));
assertThat(updatedDatafeed.getScrollSize(), equalTo(5000));
}

View File

@@ -26,7 +26,7 @@ public class PutDatafeedActionRequestTests extends AbstractStreamableXContentTes
@Override
protected Request createTestInstance() {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, randomAlphaOfLength(10));
datafeedConfig.setIndexes(Arrays.asList(randomAlphaOfLength(10)));
datafeedConfig.setIndices(Arrays.asList(randomAlphaOfLength(10)));
datafeedConfig.setTypes(Arrays.asList(randomAlphaOfLength(10)));
return new Request(datafeedConfig.build());
}

View File

@@ -18,7 +18,7 @@ public class PutDatafeedActionResponseTests extends AbstractStreamableTestCase<R
protected Response createTestInstance() {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(
DatafeedConfigTests.randomValidDatafeedId(), randomAlphaOfLength(10));
datafeedConfig.setIndexes(Arrays.asList(randomAlphaOfLength(10)));
datafeedConfig.setIndices(Arrays.asList(randomAlphaOfLength(10)));
datafeedConfig.setTypes(Arrays.asList(randomAlphaOfLength(10)));
return new Response(randomBoolean(), datafeedConfig.build());
}

View File

@@ -45,7 +45,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public static DatafeedConfig createRandomizedDatafeedConfig(String jobId, long bucketSpanMillis) {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder(randomValidDatafeedId(), jobId);
builder.setIndexes(randomStringList(1, 10));
builder.setIndices(randomStringList(1, 10));
builder.setTypes(randomStringList(1, 10));
if (randomBoolean()) {
builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10)));
@@ -118,47 +118,47 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testFillDefaults() {
DatafeedConfig.Builder expectedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
expectedDatafeedConfig.setIndexes(Arrays.asList("index"));
expectedDatafeedConfig.setIndices(Arrays.asList("index"));
expectedDatafeedConfig.setTypes(Arrays.asList("type"));
expectedDatafeedConfig.setQueryDelay(TimeValue.timeValueMinutes(1));
expectedDatafeedConfig.setScrollSize(1000);
DatafeedConfig.Builder defaultedDatafeedConfig = new DatafeedConfig.Builder("datafeed1", "job1");
defaultedDatafeedConfig.setIndexes(Arrays.asList("index"));
defaultedDatafeedConfig.setIndices(Arrays.asList("index"));
defaultedDatafeedConfig.setTypes(Arrays.asList("type"));
assertEquals(expectedDatafeedConfig.build(), defaultedDatafeedConfig.build());
}
public void testCheckValid_GivenNullIndexes() throws IOException {
public void testCheckValid_GivenNullIndices() throws IOException {
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
expectThrows(IllegalArgumentException.class, () -> conf.setIndexes(null));
expectThrows(IllegalArgumentException.class, () -> conf.setIndices(null));
}
public void testCheckValid_GivenEmptyIndexes() throws IOException {
public void testCheckValid_GivenEmptyIndices() throws IOException {
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
conf.setIndexes(Collections.emptyList());
conf.setIndices(Collections.emptyList());
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[]"), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indices", "[]"), e.getMessage());
}
public void testCheckValid_GivenIndexesContainsOnlyNulls() throws IOException {
List<String> indexes = new ArrayList<>();
indexes.add(null);
indexes.add(null);
public void testCheckValid_GivenIndicesContainsOnlyNulls() throws IOException {
List<String> indices = new ArrayList<>();
indices.add(null);
indices.add(null);
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
conf.setIndexes(indexes);
conf.setIndices(indices);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[null, null]"), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indices", "[null, null]"), e.getMessage());
}
public void testCheckValid_GivenIndexesContainsOnlyEmptyStrings() throws IOException {
List<String> indexes = new ArrayList<>();
indexes.add("");
indexes.add("");
public void testCheckValid_GivenIndicesContainsOnlyEmptyStrings() throws IOException {
List<String> indices = new ArrayList<>();
indices.add("");
indices.add("");
DatafeedConfig.Builder conf = new DatafeedConfig.Builder("datafeed1", "job1");
conf.setIndexes(indexes);
conf.setIndices(indices);
IllegalArgumentException e = ESTestCase.expectThrows(IllegalArgumentException.class, conf::build);
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indexes", "[, ]"), e.getMessage());
assertEquals(Messages.getMessage(Messages.DATAFEED_CONFIG_INVALID_OPTION_VALUE, "indices", "[, ]"), e.getMessage());
}
public void testCheckValid_GivenNegativeQueryDelay() throws IOException {
@@ -189,7 +189,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testBuild_GivenScriptFieldsAndAggregations() {
DatafeedConfig.Builder datafeed = new DatafeedConfig.Builder("datafeed1", "job1");
datafeed.setIndexes(Arrays.asList("my_index"));
datafeed.setIndices(Arrays.asList("my_index"));
datafeed.setTypes(Arrays.asList("my_type"));
datafeed.setScriptFields(Arrays.asList(new SearchSourceBuilder.ScriptField(randomAlphaOfLength(10),
mockScript(randomAlphaOfLength(10)), randomBoolean())));
@@ -202,7 +202,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testHasAggregations_GivenNull() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
builder.setIndexes(Arrays.asList("myIndex"));
builder.setIndices(Arrays.asList("myIndex"));
builder.setTypes(Arrays.asList("myType"));
DatafeedConfig datafeedConfig = builder.build();
@@ -211,7 +211,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testHasAggregations_NonEmpty() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
builder.setIndexes(Arrays.asList("myIndex"));
builder.setIndices(Arrays.asList("myIndex"));
builder.setTypes(Arrays.asList("myType"));
builder.setAggregations(new AggregatorFactories.Builder().addAggregator(
AggregationBuilders.dateHistogram("time").interval(300000)));
@@ -222,7 +222,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testBuild_GivenEmptyAggregations() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
builder.setIndexes(Arrays.asList("myIndex"));
builder.setIndices(Arrays.asList("myIndex"));
builder.setTypes(Arrays.asList("myType"));
builder.setAggregations(new AggregatorFactories.Builder());
@@ -233,7 +233,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testBuild_GivenTopLevelAggIsTerms() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
builder.setIndexes(Arrays.asList("myIndex"));
builder.setIndices(Arrays.asList("myIndex"));
builder.setTypes(Arrays.asList("myType"));
builder.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.terms("foo")));
@@ -244,7 +244,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
public void testBuild_GivenHistogramWithDefaultInterval() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
builder.setIndexes(Arrays.asList("myIndex"));
builder.setIndices(Arrays.asList("myIndex"));
builder.setTypes(Arrays.asList("myType"));
builder.setAggregations(new AggregatorFactories.Builder().addAggregator(AggregationBuilders.histogram("time")));
@@ -329,7 +329,7 @@ public class DatafeedConfigTests extends AbstractSerializingTestCase<DatafeedCon
private static DatafeedConfig createDatafeedWithDateHistogram(DateHistogramAggregationBuilder dateHistogram) {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("datafeed1", "job1");
builder.setIndexes(Arrays.asList("myIndex"));
builder.setIndices(Arrays.asList("myIndex"));
builder.setTypes(Arrays.asList("myType"));
builder.setAggregations(new AggregatorFactories.Builder().addAggregator(dateHistogram));
return builder.build();

View File

@@ -145,7 +145,7 @@ public class DatafeedJobValidatorTests extends ESTestCase {
private static DatafeedConfig.Builder createValidDatafeedConfig() {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-datafeed", "my-job");
builder.setIndexes(Collections.singletonList("myIndex"));
builder.setIndices(Collections.singletonList("myIndex"));
builder.setTypes(Collections.singletonList("myType"));
return builder;
}

View File

@@ -356,7 +356,7 @@ public class DatafeedManagerTests extends ESTestCase {
public static DatafeedConfig.Builder createDatafeedConfig(String datafeedId, String jobId) {
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, jobId);
datafeedConfig.setIndexes(Arrays.asList("myIndex"));
datafeedConfig.setIndices(Arrays.asList("myIndex"));
datafeedConfig.setTypes(Arrays.asList("myType"));
return datafeedConfig;
}

View File

@@ -40,7 +40,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
builder.setFrequency(TimeValue.timeValueSeconds(randomIntBetween(1, Integer.MAX_VALUE)));
}
if (randomBoolean()) {
builder.setIndexes(DatafeedConfigTests.randomStringList(1, 10));
builder.setIndices(DatafeedConfigTests.randomStringList(1, 10));
}
if (randomBoolean()) {
builder.setTypes(DatafeedConfigTests.randomStringList(1, 10));
@@ -114,13 +114,13 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
public void testApply_givenFullUpdateNoAggregations() {
DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed");
datafeedBuilder.setIndexes(Arrays.asList("i_1"));
datafeedBuilder.setIndices(Arrays.asList("i_1"));
datafeedBuilder.setTypes(Arrays.asList("t_1"));
DatafeedConfig datafeed = datafeedBuilder.build();
DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeed.getId());
update.setJobId("bar");
update.setIndexes(Arrays.asList("i_2"));
update.setIndices(Arrays.asList("i_2"));
update.setTypes(Arrays.asList("t_2"));
update.setQueryDelay(TimeValue.timeValueSeconds(42));
update.setFrequency(TimeValue.timeValueSeconds(142));
@@ -133,7 +133,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
DatafeedConfig updatedDatafeed = update.build().apply(datafeed);
assertThat(updatedDatafeed.getJobId(), equalTo("bar"));
assertThat(updatedDatafeed.getIndexes(), equalTo(Arrays.asList("i_2")));
assertThat(updatedDatafeed.getIndices(), equalTo(Arrays.asList("i_2")));
assertThat(updatedDatafeed.getTypes(), equalTo(Arrays.asList("t_2")));
assertThat(updatedDatafeed.getQueryDelay(), equalTo(TimeValue.timeValueSeconds(42)));
assertThat(updatedDatafeed.getFrequency(), equalTo(TimeValue.timeValueSeconds(142)));
@@ -148,7 +148,7 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
public void testApply_givenAggregations() {
DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("foo", "foo-feed");
datafeedBuilder.setIndexes(Arrays.asList("i_1"));
datafeedBuilder.setIndices(Arrays.asList("i_1"));
datafeedBuilder.setTypes(Arrays.asList("t_1"));
DatafeedConfig datafeed = datafeedBuilder.build();
@@ -158,10 +158,10 @@ public class DatafeedUpdateTests extends AbstractSerializingTestCase<DatafeedUpd
DatafeedConfig updatedDatafeed = update.build().apply(datafeed);
assertThat(updatedDatafeed.getIndexes(), equalTo(Arrays.asList("i_1")));
assertThat(updatedDatafeed.getIndices(), equalTo(Arrays.asList("i_1")));
assertThat(updatedDatafeed.getTypes(), equalTo(Arrays.asList("t_1")));
assertThat(updatedDatafeed.getAggregations(),
equalTo(new AggregatorFactories.Builder().addAggregator(
AggregationBuilders.histogram("a").interval(300000))));
}
}
}

View File

@@ -50,7 +50,7 @@ public class AggregationDataExtractorTests extends ESTestCase {
private String jobId;
private String timeField;
private List<String> types;
private List<String> indexes;
private List<String> indices;
private QueryBuilder query;
private AggregatorFactories.Builder aggs;
@@ -79,7 +79,7 @@ public class AggregationDataExtractorTests extends ESTestCase {
capturedSearchRequests = new ArrayList<>();
jobId = "test-job";
timeField = "time";
indexes = Arrays.asList("index-1", "index-2");
indices = Arrays.asList("index-1", "index-2");
types = Arrays.asList("type-1", "type-2");
query = QueryBuilders.matchAllQuery();
aggs = new AggregatorFactories.Builder()
@@ -270,7 +270,7 @@ public class AggregationDataExtractorTests extends ESTestCase {
}
private AggregationDataExtractorContext createContext(long start, long end) {
return new AggregationDataExtractorContext(jobId, timeField, indexes, types, query, aggs, start, end, true);
return new AggregationDataExtractorContext(jobId, timeField, indices, types, query, aggs, start, end, true);
}
@SuppressWarnings("unchecked")

View File

@@ -48,7 +48,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
private String jobId;
private String timeField;
private List<String> types;
private List<String> indexes;
private List<String> indices;
private QueryBuilder query;
private int scrollSize;
private TimeValue chunkSpan;
@@ -79,7 +79,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
capturedSearchRequests = new ArrayList<>();
jobId = "test-job";
timeField = "time";
indexes = Arrays.asList("index-1", "index-2");
indices = Arrays.asList("index-1", "index-2");
types = Arrays.asList("type-1", "type-2");
query = QueryBuilders.matchAllQuery();
scrollSize = 1000;
@@ -445,7 +445,7 @@ public class ChunkedDataExtractorTests extends ESTestCase {
}
private ChunkedDataExtractorContext createContext(long start, long end) {
return new ChunkedDataExtractorContext(jobId, timeField, indexes, types, query, scrollSize, start, end, chunkSpan);
return new ChunkedDataExtractorContext(jobId, timeField, indices, types, query, scrollSize, start, end, chunkSpan);
}
private static class StubSubExtractor implements DataExtractor {

View File

@@ -51,7 +51,7 @@ public class ScrollDataExtractorTests extends ESTestCase {
private String jobId;
private ExtractedFields extractedFields;
private List<String> types;
private List<String> indexes;
private List<String> indices;
private QueryBuilder query;
private List<SearchSourceBuilder.ScriptField> scriptFields;
private int scrollSize;
@@ -100,7 +100,7 @@ public class ScrollDataExtractorTests extends ESTestCase {
ExtractedField timeField = ExtractedField.newField("time", ExtractedField.ExtractionMethod.DOC_VALUE);
extractedFields = new ExtractedFields(timeField,
Arrays.asList(timeField, ExtractedField.newField("field_1", ExtractedField.ExtractionMethod.DOC_VALUE)));
indexes = Arrays.asList("index-1", "index-2");
indices = Arrays.asList("index-1", "index-2");
types = Arrays.asList("type-1", "type-2");
query = QueryBuilders.matchAllQuery();
scriptFields = Collections.emptyList();
@@ -285,7 +285,7 @@ public class ScrollDataExtractorTests extends ESTestCase {
"script2", new Script(ScriptType.INLINE, "painless", "return domainSplit('foo.com', params);", emptyMap()), false);
List<SearchSourceBuilder.ScriptField> sFields = Arrays.asList(withoutSplit, withSplit);
ScrollDataExtractorContext context = new ScrollDataExtractorContext(jobId, extractedFields, indexes,
ScrollDataExtractorContext context = new ScrollDataExtractorContext(jobId, extractedFields, indices,
types, query, sFields, scrollSize, 1000, 2000);
TestDataExtractor extractor = new TestDataExtractor(context);
@@ -332,7 +332,7 @@ public class ScrollDataExtractorTests extends ESTestCase {
}
private ScrollDataExtractorContext createContext(long start, long end) {
return new ScrollDataExtractorContext(jobId, extractedFields, indexes, types, query, scriptFields, scrollSize, start, end);
return new ScrollDataExtractorContext(jobId, extractedFields, indices, types, query, scriptFields, scrollSize, start, end);
}
private SearchResponse createEmptySearchResponse() {

View File

@@ -89,7 +89,7 @@ public class CategorizationIT extends MlNativeAutodetectIntegTestCase {
String datafeedId = job.getId() + "-feed";
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId());
datafeedConfig.setIndexes(Arrays.asList(DATA_INDEX));
datafeedConfig.setIndices(Arrays.asList(DATA_INDEX));
datafeedConfig.setTypes(Arrays.asList(DATA_TYPE));
DatafeedConfig datafeed = datafeedConfig.build();
registerDatafeed(datafeed);
@@ -135,7 +135,7 @@ public class CategorizationIT extends MlNativeAutodetectIntegTestCase {
String datafeedId = job.getId() + "-feed";
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId());
datafeedConfig.setIndexes(Arrays.asList(DATA_INDEX));
datafeedConfig.setIndices(Arrays.asList(DATA_INDEX));
datafeedConfig.setTypes(Arrays.asList(DATA_TYPE));
DatafeedConfig datafeed = datafeedConfig.build();
registerDatafeed(datafeed);

View File

@@ -265,7 +265,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
assertThat(e.getMessage(), containsString("Cannot create datafeed"));
assertThat(e.getMessage(),
containsString("user ml_admin lacks permissions on the indexes to be searched"));
containsString("user ml_admin lacks permissions on the indices to be searched"));
}
public void testInsufficientSearchPrivilegesOnPreview() throws Exception {

View File

@@ -95,7 +95,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase {
String datafeedId = job.getId() + "-feed";
DatafeedConfig.Builder datafeedConfig = new DatafeedConfig.Builder(datafeedId, job.getId());
datafeedConfig.setIndexes(Arrays.asList(DATA_INDEX));
datafeedConfig.setIndices(Arrays.asList(DATA_INDEX));
datafeedConfig.setTypes(Arrays.asList(DATA_TYPE));
DatafeedConfig datafeed = datafeedConfig.build();
registerDatafeed(datafeed);

View File

@@ -164,15 +164,15 @@ public abstract class BaseMlIntegTestCase extends ESIntegTestCase {
return builder;
}
public static DatafeedConfig createDatafeed(String datafeedId, String jobId, List<String> indexes) {
return createDatafeedBuilder(datafeedId, jobId, indexes).build();
public static DatafeedConfig createDatafeed(String datafeedId, String jobId, List<String> indices) {
return createDatafeedBuilder(datafeedId, jobId, indices).build();
}
public static DatafeedConfig.Builder createDatafeedBuilder(String datafeedId, String jobId, List<String> indexes) {
public static DatafeedConfig.Builder createDatafeedBuilder(String datafeedId, String jobId, List<String> indices) {
DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedId, jobId);
builder.setQueryDelay(TimeValue.timeValueSeconds(1));
builder.setFrequency(TimeValue.timeValueSeconds(2));
builder.setIndexes(indexes);
builder.setIndices(indices);
builder.setTypes(Collections.singletonList("type"));
return builder;
}

View File

@@ -71,7 +71,7 @@ setup:
}
- match: { datafeed_id: "test-datafeed-1" }
- match: { job_id: "datafeeds-crud-1" }
- match: { indexes: ["index-foo"] }
- match: { indices: ["index-foo"] }
- match: { types: ["type-bar"] }
- match: { scroll_size: 1000 }
- is_true: query.match_all
@@ -166,7 +166,7 @@ setup:
}
- match: { datafeed_id: "test-datafeed-1" }
- match: { job_id: "datafeeds-crud-1" }
- match: { indexes: ["index-*"] }
- match: { indices: ["index-*"] }
- match: { types: ["type-bar"] }
- match: { scroll_size: 10000 }
- match: { frequency: "2m" }
@@ -194,7 +194,7 @@ setup:
}
- match: { datafeed_id: "test-datafeed-1" }
- match: { job_id: "datafeeds-crud-2" }
- match: { indexes: ["index-foo"] }
- match: { indices: ["index-foo"] }
- match: { types: ["type-bar"] }
---
@@ -275,7 +275,7 @@ setup:
body: >
{
"job_id":"datafeeds-crud-1",
"indexes":["index-foo"],
"indices":["index-foo"],
"types":["type-bar"],
"chunking_config": {"mode":"manual","time_span": "1h"}
}