Merge branch 'master' into feature/sql
Original commit: elastic/x-pack-elasticsearch@f789ea33ab
Commit 9a08c93f38
@@ -101,12 +101,22 @@ analyzed.

[float]
=== Time-based index patterns are not supported
//See x-pack-elasticsearch/#1910

It is not possible to create an {xpackml} analysis job that uses time-based
index patterns, for example `[logstash-]YYYY.MM.DD`.
This applies to the single metric or multi metric job creation wizards in {kib}.

[float]
=== Fields named "by", "count", or "over" cannot be used to split data
//See x-pack-elasticsearch/#858

You cannot use the following field names in the `by_field_name` or
`over_field_name` properties in a job: `by`; `count`; `over`. This limitation
also applies to those properties when you create advanced jobs in {kib}.

[float]
=== Jobs created in {kib} use model plot config and pre-aggregated data
//See x-pack-elasticsearch/#844
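To make the reserved-name limitation above concrete, here is a minimal, hedged Java sketch (not code from this commit) of how a configuration layer could reject the documented names before a job is created; the class and method names are hypothetical and exist only for illustration.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

final class SplitFieldNameCheck {
    // Reserved names from the limitation above: they cannot be used as by_field_name or over_field_name.
    private static final Set<String> RESERVED = new HashSet<>(Arrays.asList("by", "count", "over"));

    static void validateSplitField(String property, String fieldName) {
        if (fieldName != null && RESERVED.contains(fieldName)) {
            throw new IllegalArgumentException("[" + fieldName + "] cannot be used as [" + property + "]");
        }
    }

    public static void main(String[] args) {
        validateSplitField("over_field_name", "user");  // allowed
        validateSplitField("by_field_name", "count");   // throws IllegalArgumentException
    }
}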
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.ml.job.persistence;

import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse.Result;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;

@@ -328,7 +329,7 @@ public class JobResultsPersister extends AbstractComponent {
            } catch (IOException e) {
                logger.error(new ParameterizedMessage("[{}] Error writing [{}]", jobId, (id == null) ? "auto-generated ID" : id), e);
                IndexResponse.Builder notCreatedResponse = new IndexResponse.Builder();
                notCreatedResponse.setCreated(false);
                notCreatedResponse.setResult(Result.NOOP);
                listener.onResponse(notCreatedResponse.build());
            }
        }
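The hunk above changes the IOException path to report a NOOP result instead of "not created". As a minimal, hedged sketch of that pattern, assuming a simplified listener and result type (stand-ins, not the Elasticsearch classes), the failure is logged and converted into a "nothing was written" response handed to the callback rather than a thrown exception:

import java.io.IOException;

final class NoopOnFailureExample {
    enum WriteResult { CREATED, NOOP }

    interface WriteListener {
        void onResponse(WriteResult result);
    }

    // Stand-in for the persister's write path: if building the document fails,
    // report NOOP to the listener instead of propagating the IOException.
    static void persist(String jobId, String id, WriteListener listener) {
        try {
            throw new IOException("simulated serialization failure");
        } catch (IOException e) {
            System.err.printf("[%s] Error writing [%s]%n", jobId, (id == null) ? "auto-generated ID" : id);
            listener.onResponse(WriteResult.NOOP);
        }
    }

    public static void main(String[] args) {
        persist("job-1", null, result -> System.out.println("result: " + result));
    }
}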
@@ -97,20 +97,49 @@
              }
            }
          }
        },
        {
          "kibana_settings": {
            "search": {
              "request": {
                "search_type": "query_then_fetch",
                "indices": [
                  ".monitoring-kibana-6-*"
                ],
                "body": {
                  "size": 1,
                  "query": {
                    "bool": {
                      "filter": {
                        "term": {
                          "type": "kibana_settings"
                        }
                      }
                    }
                  },
                  "sort": [
                    {
                      "timestamp": {
                        "order": "desc"
                      }
                    }
                  ]
                }
              }
            }
          }
        }
      ]
    }
  },
  "condition": {
    "script": {
      "source": "ctx.vars.fails_check = ctx.payload.check.hits.total != 0 && ctx.payload.check.hits.hits[0]._source.cluster_state.status != 'green';ctx.vars.not_resolved = ctx.payload.alert.hits.total == 1 && ctx.payload.alert.hits.hits[0]._source.resolved_timestamp == null;return ctx.vars.fails_check || ctx.vars.not_resolved"
    }
  },
  "transform": {
    "script": {
      "source": "def state = 'red';if (ctx.vars.fails_check){state = ctx.payload.check.hits.hits[0]._source.cluster_state.status;}if (ctx.vars.not_resolved){ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check == false) {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = ['timestamp': ctx.execution_time, 'metadata': ctx.metadata.xpack];}if (ctx.vars.fails_check) {ctx.payload.prefix = 'Elasticsearch cluster status is ' + state + '.';if (state == 'red') {ctx.payload.message = 'Allocate missing primary shards and replica shards.';ctx.payload.metadata.severity = 2100;} else {ctx.payload.message = 'Allocate missing replica shards.';ctx.payload.metadata.severity = 1100;}}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
      "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.defaultAdminEmail : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def state = ctx.payload.check.hits.hits[0]._source.cluster_state.status;if (ctx.vars.not_resolved){ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check == false) {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = ['timestamp': ctx.execution_time, 'metadata': ctx.metadata.xpack];}if (ctx.vars.fails_check) {ctx.payload.prefix = 'Elasticsearch cluster status is ' + state + '.';if (state == 'red') {ctx.payload.message = 'Allocate missing primary shards and replica shards.';ctx.payload.metadata.severity = 2100;} else {ctx.payload.message = 'Allocate missing replica shards.';ctx.payload.metadata.severity = 1100;}}ctx.vars.state = state.toUpperCase();ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
    }
  },
  "actions": {

@@ -120,6 +149,19 @@
          "doc_type": "doc",
          "doc_id": "${monitoring.watch.unique_id}"
        }
      },
      "send_email_to_admin": {
        "condition": {
          "script": "return ctx.vars.email_recipient != null && (ctx.vars.is_new || ctx.vars.is_resolved)"
        },
        "email": {
          "to": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "from": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "subject": "[{{#ctx.vars.is_new}}NEW{{/ctx.vars.is_new}}{{#ctx.vars.is_resolved}}RESOLVED{{/ctx.vars.is_resolved}}] {{ctx.metadata.name}} [{{ctx.vars.state}}]",
          "body": {
            "text": "{{#ctx.vars.is_resolved}}This cluster alert has been resolved: {{/ctx.vars.is_resolved}}{{ctx.payload.prefix}} {{ctx.payload.message}}"
          }
        }
      }
    }
  }
}
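The new transform and email condition above gate the admin email on three values: the Kibana defaultAdminEmail setting, whether the check currently fails, and whether an unresolved alert document exists. A hedged Java rendering of that Painless logic (illustrative names, not Watcher API) makes the gating explicit:

final class ClusterAlertEmailGate {
    // Mirrors the script logic in the watch above:
    //   is_new      = fails_check && !not_resolved
    //   is_resolved = !fails_check && not_resolved
    //   email only when a recipient is configured and the alert is new or just resolved.
    static boolean shouldEmail(String defaultAdminEmail, boolean failsCheck, boolean notResolved) {
        boolean isNew = failsCheck && !notResolved;       // alert fired and no open alert document exists
        boolean isResolved = !failsCheck && notResolved;  // open alert document exists but the check now passes
        return defaultAdminEmail != null && (isNew || isResolved);
    }

    public static void main(String[] args) {
        System.out.println(shouldEmail("admin@example.com", true, false));  // true: new alert
        System.out.println(shouldEmail("admin@example.com", false, true));  // true: resolved alert
        System.out.println(shouldEmail(null, true, false));                 // false: no defaultAdminEmail configured
    }
}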
@@ -93,6 +93,37 @@
              }
            }
          }
        },
        {
          "kibana_settings": {
            "search": {
              "request": {
                "search_type": "query_then_fetch",
                "indices": [
                  ".monitoring-kibana-6-*"
                ],
                "body": {
                  "size": 1,
                  "query": {
                    "bool": {
                      "filter": {
                        "term": {
                          "type": "kibana_settings"
                        }
                      }
                    }
                  },
                  "sort": [
                    {
                      "timestamp": {
                        "order": "desc"
                      }
                    }
                  ]
                }
              }
            }
          }
        }
      ]
    }

@@ -104,7 +135,7 @@
  },
  "transform": {
    "script": {
      "source": "def versionMessage = null;if (ctx.vars.fails_check) {def versions = new ArrayList(ctx.payload.check.hits.hits[0]._source.cluster_stats.nodes.versions);Collections.sort(versions);versionMessage = 'Versions: [' + String.join(', ', versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Elasticsearch.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
      "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.defaultAdminEmail : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {def versions = new ArrayList(ctx.payload.check.hits.hits[0]._source.cluster_stats.nodes.versions);Collections.sort(versions);versionMessage = 'Versions: [' + String.join(', ', versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Elasticsearch.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
    }
  },
  "actions": {

@@ -114,6 +145,19 @@
          "doc_type": "doc",
          "doc_id": "${monitoring.watch.unique_id}"
        }
      },
      "send_email_to_admin": {
        "condition": {
          "script": "return ctx.vars.email_recipient != null && (ctx.vars.is_new || ctx.vars.is_resolved)"
        },
        "email": {
          "to": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "from": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "subject": "[{{#ctx.vars.is_new}}NEW{{/ctx.vars.is_new}}{{#ctx.vars.is_resolved}}RESOLVED{{/ctx.vars.is_resolved}}] {{ctx.metadata.name}}",
          "body": {
            "text": "{{#ctx.vars.is_resolved}}This cluster alert has been resolved: {{/ctx.vars.is_resolved}}{{ctx.payload.prefix}} {{ctx.payload.message}}"
          }
        }
      }
    }
  }
}
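Both the old and new transform scripts above build the alert text by sorting the reported versions and joining them into a single message. A hedged Java rendering of that step (the input list stands in for cluster_stats.nodes.versions from the payload):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

final class VersionMismatchMessage {
    // Sort the versions for a stable message, then join them, as the transform script does.
    static String versionMessage(List<String> reportedVersions) {
        List<String> versions = new ArrayList<>(reportedVersions);
        Collections.sort(versions);
        return "Versions: [" + String.join(", ", versions) + "].";
    }

    public static void main(String[] args) {
        System.out.println(versionMessage(Arrays.asList("6.0.0", "5.6.3")));
        // Versions: [5.6.3, 6.0.0].
    }
}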
@@ -120,6 +120,37 @@
              }
            }
          }
        },
        {
          "kibana_settings": {
            "search": {
              "request": {
                "search_type": "query_then_fetch",
                "indices": [
                  ".monitoring-kibana-6-*"
                ],
                "body": {
                  "size": 1,
                  "query": {
                    "bool": {
                      "filter": {
                        "term": {
                          "type": "kibana_settings"
                        }
                      }
                    }
                  },
                  "sort": [
                    {
                      "timestamp": {
                        "order": "desc"
                      }
                    }
                  ]
                }
              }
            }
          }
        }
      ]
    }

@@ -131,7 +162,7 @@
  },
  "transform": {
    "script": {
      "source": "def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Kibana.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
      "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.defaultAdminEmail : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Kibana.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
    }
  },
  "actions": {

@@ -141,6 +172,19 @@
          "doc_type": "doc",
          "doc_id": "${monitoring.watch.unique_id}"
        }
      },
      "send_email_to_admin": {
        "condition": {
          "script": "return ctx.vars.email_recipient != null && (ctx.vars.is_new || ctx.vars.is_resolved)"
        },
        "email": {
          "to": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "from": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "subject": "[{{#ctx.vars.is_new}}NEW{{/ctx.vars.is_new}}{{#ctx.vars.is_resolved}}RESOLVED{{/ctx.vars.is_resolved}}] {{ctx.metadata.name}}",
          "body": {
            "text": "{{#ctx.vars.is_resolved}}This cluster alert has been resolved: {{/ctx.vars.is_resolved}}{{ctx.payload.prefix}} {{ctx.payload.message}}"
          }
        }
      }
    }
  }
}
@@ -120,6 +120,37 @@
              }
            }
          }
        },
        {
          "kibana_settings": {
            "search": {
              "request": {
                "search_type": "query_then_fetch",
                "indices": [
                  ".monitoring-kibana-6-*"
                ],
                "body": {
                  "size": 1,
                  "query": {
                    "bool": {
                      "filter": {
                        "term": {
                          "type": "kibana_settings"
                        }
                      }
                    }
                  },
                  "sort": [
                    {
                      "timestamp": {
                        "order": "desc"
                      }
                    }
                  ]
                }
              }
            }
          }
        }
      ]
    }

@@ -131,7 +162,7 @@
  },
  "transform": {
    "script": {
      "source": "def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Logstash.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
      "source": "ctx.vars.email_recipient = (ctx.payload.kibana_settings.hits.total > 0) ? ctx.payload.kibana_settings.hits.hits[0]._source.kibana_settings.xpack.defaultAdminEmail : null;ctx.vars.is_new = ctx.vars.fails_check && !ctx.vars.not_resolved;ctx.vars.is_resolved = !ctx.vars.fails_check && ctx.vars.not_resolved;def versionMessage = null;if (ctx.vars.fails_check) {versionMessage = 'Versions: [' + String.join(', ', ctx.vars.versions) + '].';}if (ctx.vars.not_resolved) {ctx.payload = ctx.payload.alert.hits.hits[0]._source;if (ctx.vars.fails_check) {ctx.payload.message = versionMessage;} else {ctx.payload.resolved_timestamp = ctx.execution_time;}} else {ctx.payload = [ 'timestamp': ctx.execution_time, 'prefix': 'This cluster is running with multiple versions of Logstash.', 'message': versionMessage, 'metadata': ctx.metadata.xpack ];}ctx.payload.update_timestamp = ctx.execution_time;return ctx.payload;"
    }
  },
  "actions": {

@@ -141,6 +172,19 @@
          "doc_type": "doc",
          "doc_id": "${monitoring.watch.unique_id}"
        }
      },
      "send_email_to_admin": {
        "condition": {
          "script": "return ctx.vars.email_recipient != null && (ctx.vars.is_new || ctx.vars.is_resolved)"
        },
        "email": {
          "to": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "from": "X-Pack Admin <{{ctx.vars.email_recipient}}>",
          "subject": "[{{#ctx.vars.is_new}}NEW{{/ctx.vars.is_new}}{{#ctx.vars.is_resolved}}RESOLVED{{/ctx.vars.is_resolved}}] {{ctx.metadata.name}}",
          "body": {
            "text": "{{#ctx.vars.is_resolved}}This cluster alert has been resolved: {{/ctx.vars.is_resolved}}{{ctx.payload.prefix}} {{ctx.payload.message}}"
          }
        }
      }
    }
  }
}
@@ -100,11 +100,10 @@ public abstract class AbstractOldXPackIndicesBackwardsCompatibilityTestCase exte
    public void testAllVersionsTested() throws Exception {
        SortedSet<String> expectedVersions = new TreeSet<>();
        for (Version v : VersionUtils.allReleasedVersions()) {
            if (false == shouldTestVersion(v)) continue;
            if (v.before(Version.CURRENT.minimumIndexCompatibilityVersion())) continue; // we can only support one major version backward
            if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself
            if (v.isBeta() == true || v.isAlpha() == true || v.isRC() == true) continue; // don't check alphas etc
            expectedVersions.add("x-pack-" + v.toString() + ".zip");
            if (v.isRelease()) {
                // no guarantees for prereleases
                expectedVersions.add("x-pack-" + v.toString() + ".zip");
            }
        }
        expectedVersions.removeAll(dataFiles);
        if (expectedVersions.isEmpty() == false) {

@@ -121,7 +120,6 @@ public abstract class AbstractOldXPackIndicesBackwardsCompatibilityTestCase exte
        Collections.shuffle(dataFiles, random());
        for (String dataFile : dataFiles) {
            Version version = Version.fromString(dataFile.replace("x-pack-", "").replace(".zip", ""));
            if (false == shouldTestVersion(version)) continue;
            long clusterStartTime = System.nanoTime();
            setupCluster(dataFile);
            ensureYellow();

@@ -137,13 +135,6 @@ public abstract class AbstractOldXPackIndicesBackwardsCompatibilityTestCase exte
        }
    }

    /**
     * Should we test this version at all? Called before loading the data directory. Return false to skip it entirely.
     */
    protected boolean shouldTestVersion(Version version) {
        return true;
    }

    /**
     * Actually test this version.
     */
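The first hunk above narrows the set of expected data files to released versions only (`v.isRelease()`), since prereleases carry no guarantees. A hedged, self-contained sketch of that filtering idea, using a simplified stand-in for org.elasticsearch.Version rather than the real class:

import java.util.Arrays;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.stream.Collectors;

final class ExpectedDataFilesExample {
    // Minimal stand-in for a version: just an id string and a release flag.
    static final class Ver {
        final String id;
        final boolean release;
        Ver(String id, boolean release) { this.id = id; this.release = release; }
    }

    // Mirrors the test change above: only released versions (no alphas/betas/RCs)
    // are expected to have an x-pack-<version>.zip data file.
    static SortedSet<String> expectedDataFiles(List<Ver> versions) {
        return versions.stream()
                .filter(v -> v.release)
                .map(v -> "x-pack-" + v.id + ".zip")
                .collect(Collectors.toCollection(TreeSet::new));
    }

    public static void main(String[] args) {
        List<Ver> versions = Arrays.asList(new Ver("5.6.0", true), new Ver("6.0.0-rc1", false));
        System.out.println(expectedDataFiles(versions)); // [x-pack-5.6.0.zip]
    }
}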
@@ -1,6 +1,6 @@
{
  "xpack.graph.explore": {
    "documentation": "https://www.elastic.co/guide/en/x-pack/current/graph-api-explore.html",
    "documentation": "https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html",
    "methods": ["GET", "POST"],
    "url": {
      "path": "/{index}/_xpack/graph/_explore",
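The REST spec above documents the explore endpoint at /{index}/_xpack/graph/_explore with GET and POST. As a hedged usage sketch, assuming the 5.x/6.x low-level REST client and a local node on port 9200 (the index name and field names are placeholders, not from this commit):

import java.util.Collections;

import org.apache.http.HttpHost;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class GraphExploreCall {
    public static void main(String[] args) throws Exception {
        // Build a low-level client against a local node; host and port are assumptions for this sketch.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Minimal explore body: seed vertices on one field, connect to another.
            String body = "{ \"query\": { \"match_all\": {} }, "
                    + "\"vertices\": [ { \"field\": \"user\" } ], "
                    + "\"connections\": { \"vertices\": [ { \"field\": \"product\" } ] } }";
            Response response = client.performRequest("POST", "/my-index/_xpack/graph/_explore",
                    Collections.emptyMap(),
                    new NStringEntity(body, ContentType.APPLICATION_JSON));
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}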