Merge remote-tracking branch 'remotes/upstream/master' into feature/sql_2
Original commit: elastic/x-pack-elasticsearch@2c78f17486
commit 448511e093
@@ -79,6 +79,129 @@ image::images/ml-category-advanced.jpg["Advanced job configuration options relat
NOTE: To add the `categorization_examples_limit` property, you must use the
**Edit JSON** tab and copy the `analysis_limits` object from the API example.

It is possible to customize the way the categorization field values are interpreted
to an even greater extent:

[source,js]
----------------------------------
PUT _xpack/ml/anomaly_detectors/it_ops_new_logs2
{
  "description" : "IT Ops Application Logs",
  "analysis_config" : {
    "categorization_field_name": "message",
    "bucket_span":"30m",
    "detectors" :[{
      "function":"count",
      "by_field_name": "mlcategory",
      "detector_description": "Unusual message counts"
    }],
    "categorization_analyzer":{
      "char_filter": [
        { "type": "pattern_replace", "pattern": "\\[statement:.*\\]" } <1>
      ],
      "tokenizer": "ml_classic", <2>
      "filter": [
        { "type" : "stop", "stopwords": [
          "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday",
          "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun",
          "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December",
          "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
          "GMT", "UTC"
        ] } <3>
      ]
    }
  },
  "analysis_limits":{
    "categorization_examples_limit": 5
  },
  "data_description" : {
    "time_field":"time",
    "time_format": "epoch_ms"
  }
}
----------------------------------
//CONSOLE
<1> The
{ref}/analysis-pattern-replace-charfilter.html[`pattern_replace` character filter]
here achieves exactly the same as the `categorization_filters` in the first
example.
<2> The `ml_classic` tokenizer works like the non-customizable tokenization
that was used for categorization in older versions of machine learning. Use
it if you want the same categorization behavior as older versions.
<3> English day/month words are filtered by default from log messages
before categorization. If your logs are in a different language and contain
dates, then you may get better results by filtering day/month words in your
language.

The optional `categorization_analyzer` property allows even greater customization
of how categorization interprets the categorization field value. It can refer to
a built-in Elasticsearch analyzer, or a combination of zero or more character
filters, a tokenizer, and zero or more token filters.

The `ml_classic` tokenizer and the day/month stopword filter are more or less
equivalent to the following analyzer, defined using only built-in Elasticsearch
{ref}/analysis-tokenizers.html[tokenizers] and
{ref}/analysis-tokenfilters.html[token filters]:

[source,js]
----------------------------------
PUT _xpack/ml/anomaly_detectors/it_ops_new_logs3
{
  "description" : "IT Ops Application Logs",
  "analysis_config" : {
    "categorization_field_name": "message",
    "bucket_span":"30m",
    "detectors" :[{
      "function":"count",
      "by_field_name": "mlcategory",
      "detector_description": "Unusual message counts"
    }],
    "categorization_analyzer":{
      "tokenizer": {
        "type" : "simple_pattern_split",
        "pattern" : "[^-0-9A-Za-z_.]+" <1>
      },
      "filter": [
        { "type" : "pattern_replace", "pattern": "^[0-9].*" }, <2>
        { "type" : "pattern_replace", "pattern": "^[-0-9A-Fa-f.]+$" }, <3>
        { "type" : "pattern_replace", "pattern": "^[^0-9A-Za-z]+" }, <4>
        { "type" : "pattern_replace", "pattern": "[^0-9A-Za-z]+$" }, <5>
        { "type" : "stop", "stopwords": [
          "",
          "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday",
          "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun",
          "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December",
          "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
          "GMT", "UTC"
        ] }
      ]
    }
  },
  "analysis_limits":{
    "categorization_examples_limit": 5
  },
  "data_description" : {
    "time_field":"time",
    "time_format": "epoch_ms"
  }
}
----------------------------------
//CONSOLE
<1> Tokens basically consist of hyphens, digits, letters, underscores and dots.
<2> By default, categorization ignores tokens that begin with a digit.
<3> By default, categorization also ignores tokens that are hexadecimal numbers.
<4> Underscores, hyphens, and dots are removed from the beginning of tokens.
<5> Underscores, hyphens, and dots are also removed from the end of tokens.

The key difference between the default `categorization_analyzer` and this example
analyzer is that using the `ml_classic` tokenizer is several times faster. (The
difference in behavior is that this custom analyzer does not include accented
letters in tokens whereas the `ml_classic` tokenizer will, although that could be
fixed by using more complex regular expressions.)
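
Because `categorization_analyzer` is built from the same character filters,
tokenizers, and token filters as ordinary index analysis, a quick way to see
what tokens a candidate configuration produces is the standard `_analyze`
endpoint. A sketch (the sample log message here is invented for illustration):

[source,js]
----------------------------------
POST _analyze
{
  "tokenizer": {
    "type" : "simple_pattern_split",
    "pattern" : "[^-0-9A-Za-z_.]+"
  },
  "filter": [
    { "type" : "pattern_replace", "pattern": "^[0-9].*" },
    { "type" : "stop", "stopwords": [ "", "Sep", "GMT" ] }
  ],
  "text": "Sep 25 08:09:10 GMT node-1 connection refused"
}
----------------------------------
// CONSOLE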

NOTE: To add the `categorization_analyzer` property, you must use the **Edit JSON**
tab and copy the `categorization_analyzer` object from one of the API examples above.


After you open the job and start the {dfeed} or supply data to the job, you can
view the results in {kib}. For example:
@@ -5,11 +5,15 @@ The following limitations and known problems apply to the {version} release of
{xpack}:

[float]
-=== Categorization uses English tokenization rules and dictionary words
+=== Categorization uses English dictionary words
+//See x-pack-elasticsearch/#3021
Categorization identifies static parts of unstructured logs and groups similar
-messages together. This is currently supported only for English language log
-messages.
+messages together. The default categorization tokenizer assumes English language
+log messages. For other languages you must define a different
+`categorization_analyzer` for your job. Additionally, a dictionary used to influence
+the categorization process contains only English words. This means categorization
+may work better in English than in other languages. The ability to customize the
+dictionary will be added in a future release.

[float]
=== Pop-ups must be enabled in browsers
@@ -5,7 +5,7 @@
<titleabbrev>Close Jobs</titleabbrev>
++++

-The close job API enables you to close one or more jobs.
+This API enables you to close one or more jobs.
A job can be opened and closed multiple times throughout its lifecycle.

A closed job cannot receive data or perform analysis

@@ -5,7 +5,7 @@
<titleabbrev>Delete Jobs from Calendar</titleabbrev>
++++

-The delete jobs from calendar API enables you to remove jobs from a calendar.
+This API enables you to remove jobs from a calendar.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Delete Calendar</titleabbrev>
++++

-The delete calendar API enables you to delete a calendar.
+This API enables you to delete a calendar.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Delete {dfeeds-cap}</titleabbrev>
++++

-The delete {dfeed} API enables you to delete an existing {dfeed}.
+This API enables you to delete an existing {dfeed}.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Delete Jobs</titleabbrev>
++++

-The delete job API enables you to delete an existing anomaly detection job.
+This API enables you to delete an existing anomaly detection job.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Delete Model Snapshots</titleabbrev>
++++

-The delete model snapshot API enables you to delete an existing model snapshot.
+This API enables you to delete an existing model snapshot.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Flush Jobs</titleabbrev>
++++

-The flush job API forces any buffered data to be processed by the job.
+This API forces any buffered data to be processed by the job.

==== Request

@@ -15,7 +15,7 @@ The flush job API forces any buffered data to be processed by the job.

==== Description

-The flush job API is only applicable when sending data for analysis using the
+The flush jobs API is only applicable when sending data for analysis using the
<<ml-post-data,post data API>>. Depending on the content of the buffer, it
might additionally calculate new results.

@@ -5,8 +5,8 @@
<titleabbrev>Forecast Jobs</titleabbrev>
++++

-The forecast jobs API uses historical behavior to predict the future behavior of
-a time series.
+This API uses historical behavior to predict the future behavior of a time
+series.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Get Buckets</titleabbrev>
++++

-The get bucket API enables you to retrieve job results for one or more buckets.
+This API enables you to retrieve job results for one or more buckets.

==== Request

@@ -17,7 +17,8 @@ The get bucket API enables you to retrieve job results for one or more buckets.

==== Description

-This API presents a chronological view of the records, grouped by bucket.
+The get buckets API presents a chronological view of the records, grouped by
+bucket.

==== Path Parameters

@@ -5,7 +5,7 @@
<titleabbrev>Get Calendars</titleabbrev>
++++

-The get calendars API enables you to retrieve configuration information for
+This API enables you to retrieve configuration information for
calendars.

@@ -5,8 +5,7 @@
<titleabbrev>Get Categories</titleabbrev>
++++

-The get categories API enables you to retrieve job results for one or more
-categories.
+This API enables you to retrieve job results for one or more categories.

==== Request

@@ -5,8 +5,7 @@
<titleabbrev>Get {dfeed-cap} Statistics</titleabbrev>
++++

-The get {dfeed} statistics API enables you to retrieve usage information for
-{dfeeds}.
+This API enables you to retrieve usage information for {dfeeds}.

==== Request

@@ -5,8 +5,7 @@
<titleabbrev>Get {dfeeds-cap}</titleabbrev>
++++

-The get {dfeeds} API enables you to retrieve configuration information for
-{dfeeds}.
+This API enables you to retrieve configuration information for {dfeeds}.

==== Request

@@ -5,8 +5,7 @@
<titleabbrev>Get Influencers</titleabbrev>
++++

-The get influencers API enables you to retrieve job results for one or more
-influencers.
+This API enables you to retrieve job results for one or more influencers.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Get Job Statistics</titleabbrev>
++++

-The get jobs API enables you to retrieve usage information for jobs.
+This API enables you to retrieve usage information for jobs.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Get Jobs</titleabbrev>
++++

-The get jobs API enables you to retrieve configuration information for jobs.
+This API enables you to retrieve configuration information for jobs.

==== Request

@@ -5,8 +5,8 @@
<titleabbrev>Get Overall Buckets</titleabbrev>
++++

-This API enables you to retrieve overall bucket results
-that summarize the bucket results of multiple jobs.
+This API enables you to retrieve overall bucket results that summarize the
+bucket results of multiple jobs.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Get Records</titleabbrev>
++++

-The get records API enables you to retrieve anomaly records for a job.
+This API enables you to retrieve anomaly records for a job.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Get Model Snapshots</titleabbrev>
++++

-The get model snapshots API enables you to retrieve information about model snapshots.
+This API enables you to retrieve information about model snapshots.

==== Request
@@ -110,6 +110,18 @@ An analysis configuration object has the following properties:
consideration for defining categories. For example, you can exclude SQL
statements that appear in your log files. For more information, see
{xpack-ref}/ml-configuring-categories.html[Categorizing Log Messages].
This property cannot be used at the same time as `categorization_analyzer`.
If you only want to define simple regular expression filters to be applied
prior to tokenization, it is easiest to specify them using this property.
If you also want to customize the tokenizer or post-tokenization filtering,
these filters must be included in the `categorization_analyzer` as
`pattern_replace` `char_filter`s. The effect is exactly the same.
//<<ml-configuring-categories>>.

`categorization_analyzer`::
(object or string) If `categorization_field_name` is specified,
you can also define the analyzer that will be used to interpret the field
to be categorized. See <<ml-categorizationanalyzer,categorization analyzer>>.
//<<ml-configuring-categories>>.

`detectors`::

@@ -293,6 +305,102 @@ job creation fails.

--

[float]
[[ml-categorizationanalyzer]]
==== Categorization Analyzer

The categorization analyzer specifies how the `categorization_field` is
interpreted by the categorization process. The syntax is very similar to that
used to define the `analyzer` in the <<indices-analyze,Analyze endpoint>>.

The `categorization_analyzer` field can be specified either as a string or as
an object.

If it is a string, it must refer to a
{ref}/analysis-analyzers.html[built-in analyzer] or one added by
another plugin.

If it is an object, it has the following properties:

`char_filter`::
(array of strings or objects) One or more
{ref}/analysis-charfilters.html[character filters]. In addition
to the built-in character filters, other plugins can provide more. This property
is optional. If it is not specified, no character filters are applied. If
you are customizing some other aspect of the analyzer and need to achieve
the equivalent of `categorization_filters` (which are not permitted when some
other aspect of the analyzer is customized), add them here as
{ref}/analysis-pattern-replace-charfilter.html[pattern replace character filters].

`tokenizer`::
(string or object) The name or definition of the
{ref}/analysis-tokenizers.html[tokenizer] to use after character
filters have been applied. This property is compulsory if `categorization_analyzer`
is specified as an object. Machine learning provides a tokenizer called `ml_classic`
that tokenizes in the same way as the non-customizable tokenizer in older versions of
the product. If you want to keep that behavior but change the character or token
filters, specify `"tokenizer": "ml_classic"` in your `categorization_analyzer`.

`filter`::
(array of strings or objects) One or more
{ref}/analysis-tokenfilters.html[token filters]. In addition to the built-in token
filters, other plugins can provide more. This property is optional. If it is not
specified, no token filters are applied.

If you omit `categorization_analyzer` entirely, the default is the one used by
the following job:

[source,js]
--------------------------------------------------
POST _xpack/ml/anomaly_detectors/_validate
{
  "analysis_config" : {
    "categorization_analyzer" : {
      "tokenizer" : "ml_classic",
      "filter" : [
        { "type" : "stop", "stopwords": [
          "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday",
          "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun",
          "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December",
          "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
          "GMT", "UTC"
        ] }
      ]
    },
    "categorization_field_name": "message",
    "detectors" :[{
      "function":"count",
      "by_field_name": "mlcategory"
    }]
  },
  "data_description" : {
  }
}
--------------------------------------------------
// CONSOLE
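
This default also makes the relationship with `categorization_filters` concrete.
For example (an invented fragment), specifying

[source,js]
----------------------------------
"categorization_filters": [ "\\[statement:.*\\]" ]
----------------------------------

is equivalent to adding the corresponding character filter to the default
analyzer above:

[source,js]
----------------------------------
"categorization_analyzer": {
  "char_filter": [
    { "type": "pattern_replace", "pattern": "\\[statement:.*\\]" }
  ],
  "tokenizer": "ml_classic",
  "filter": [
    { "type" : "stop", "stopwords": [
      "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday",
      "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun",
      "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December",
      "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
      "GMT", "UTC"
    ] }
  ]
}
----------------------------------

Note that the `filter` array must be repeated here, for the reason explained
next.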

However, if you specify any part of `categorization_analyzer`, then any omitted
sub-properties are _not_ defaulted.
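
For example (another invented fragment), a `categorization_analyzer` that names
only a tokenizer:

[source,js]
----------------------------------
"categorization_analyzer": {
  "tokenizer": "ml_classic"
}
----------------------------------

gets no `char_filter` and no `filter` at all; in particular, the day/month
`stop` filter shown above is not applied.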

If you are categorizing non-English messages in a language where words are separated
by spaces, you may get better results if you change the day/month words in the stop
token filter to those from your language. If you are categorizing messages in a
language where words are not separated by spaces, you will need to use a different
tokenizer as well in order to get sensible categorization results.
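
For instance, a sketch for such a language, assuming the `analysis-icu` plugin
is installed (its `icu_tokenizer` splits text without relying on spaces):

[source,js]
----------------------------------
"categorization_analyzer": {
  "tokenizer": "icu_tokenizer"
}
----------------------------------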

It is important to be aware that analyzing for categorization of machine-generated
log messages is a little different from tokenizing for search. Features that work
well for search, such as stemming, synonym substitution, and lowercasing, are likely
to make the results of categorization worse. However, in order for drilldown from
machine learning results to work correctly, the tokens that the categorization
analyzer produces need to be sufficiently similar to those produced by the search
analyzer: a search for the tokens that the categorization analyzer produces should
find the original document that the categorized field came from.

For more information, see {xpack-ref}/ml-configuring-categories.html[Categorizing Log Messages].
//<<ml-configuring-categories>>.


[float]
[[ml-apilimits]]
==== Analysis Limits
@@ -5,6 +5,7 @@
<titleabbrev>Open Jobs</titleabbrev>
++++

This API enables you to open one or more jobs.
+A job must be opened in order for it to be ready to receive and analyze data.
A job can be opened and closed multiple times throughout its lifecycle.

@@ -5,7 +5,7 @@
<titleabbrev>Post Data to Jobs</titleabbrev>
++++

-The post data API enables you to send data to an anomaly detection job for analysis.
+This API enables you to send data to an anomaly detection job for analysis.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Preview {dfeeds-cap}</titleabbrev>
++++

-The preview {dfeed} API enables you to preview a {dfeed}.
+This API enables you to preview a {dfeed}.

==== Request

@@ -15,9 +15,9 @@ The preview {dfeed} API enables you to preview a {dfeed}.

==== Description

-The API returns the first "page" of results from the `search` that is created
-by using the current {dfeed} settings. This preview shows the structure of
-the data that will be passed to the anomaly detection engine.
+The preview {dfeeds} API returns the first "page" of results from the `search`
+that is created by using the current {dfeed} settings. This preview shows the
+structure of the data that will be passed to the anomaly detection engine.

==== Path Parameters

@@ -5,7 +5,7 @@
<titleabbrev>Add Jobs to Calendar</titleabbrev>
++++

-The add jobs to calendar API enables you to add jobs to a calendar.
+This API enables you to add jobs to a calendar.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Create Calendar</titleabbrev>
++++

-The create calendar API enables you to instantiate a calendar.
+This API enables you to instantiate a calendar.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Create {dfeeds-cap}</titleabbrev>
++++

-The create {dfeed} API enables you to instantiate a {dfeed}.
+This API enables you to instantiate a {dfeed}.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Create Jobs</titleabbrev>
++++

-The create job API enables you to instantiate a job.
+This API enables you to instantiate a job.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Revert Model Snapshots</titleabbrev>
++++

-The revert model snapshot API enables you to revert to a specific snapshot.
+This API enables you to revert to a specific snapshot.

==== Request

@@ -14,13 +14,13 @@ The revert model snapshot API enables you to revert to a specific snapshot.

==== Description

-The {ml} feature in {xpack} reacts quickly to anomalous input, learning new behaviors in data.
-Highly anomalous input increases the variance in the models whilst the system learns
-whether this is a new step-change in behavior or a one-off event. In the case
-where this anomalous input is known to be a one-off, then it might be appropriate
-to reset the model state to a time before this event. For example, you might
-consider reverting to a saved snapshot after Black Friday
-or a critical system failure.
+The {ml} feature in {xpack} reacts quickly to anomalous input, learning new
+behaviors in data. Highly anomalous input increases the variance in the models
+whilst the system learns whether this is a new step-change in behavior or a
+one-off event. In the case where this anomalous input is known to be a one-off,
+then it might be appropriate to reset the model state to a time before this
+event. For example, you might consider reverting to a saved snapshot after Black
+Friday or a critical system failure.

////
To revert to a saved snapshot, you must follow this sequence:

@@ -5,6 +5,7 @@
<titleabbrev>Start {dfeeds-cap}</titleabbrev>
++++

This API enables you to start one or more {dfeeds}.
+A {dfeed} must be started in order to retrieve data from {es}.
A {dfeed} can be started and stopped multiple times throughout its lifecycle.

@@ -5,7 +5,7 @@
<titleabbrev>Stop {dfeeds-cap}</titleabbrev>
++++

-The stop {dfeeds} API enables you to stop one or more {dfeeds}.
+This API enables you to stop one or more {dfeeds}.

A {dfeed} that is stopped ceases to retrieve data from {es}.
A {dfeed} can be started and stopped multiple times throughout its lifecycle.

@@ -5,7 +5,7 @@
<titleabbrev>Update {dfeeds-cap}</titleabbrev>
++++

-The update {dfeed} API enables you to update certain properties of a {dfeed}.
+This API enables you to update certain properties of a {dfeed}.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Update Jobs</titleabbrev>
++++

-The update job API enables you to update certain properties of a job.
+This API enables you to update certain properties of a job.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Update Model Snapshots</titleabbrev>
++++

-The update model snapshot API enables you to update certain properties of a snapshot.
+This API enables you to update certain properties of a snapshot.

==== Request

@@ -5,7 +5,7 @@
<titleabbrev>Validate Detectors</titleabbrev>
++++

-The validate detectors API validates detector configuration information.
+This API validates detector configuration information.

==== Request

@@ -13,7 +13,8 @@ The validate detectors API validates detector configuration information.

==== Description

-This API enables you validate the detector configuration before you create a job.
+The validate detectors API enables you to validate the detector configuration
+before you create a job.

==== Request Body

@@ -5,7 +5,7 @@
<titleabbrev>Validate Jobs</titleabbrev>
++++

-The validate jobs API validates job configuration information.
+This API validates job configuration information.

==== Request

@@ -13,7 +13,8 @@ The validate jobs API validates job configuration information.

==== Description

-This API enables you validate the job configuration before you create the job.
+The validate jobs API enables you to validate the job configuration before you
+create the job.

==== Request Body

@@ -42,9 +42,9 @@ See <<java-clients>>.
[[installing-node-certificates]]
==== Node Certificates

-See {ref}/node-certificates.html[Generating Node Certificates].
+See {ref}/configuring-tls.html#node-certificates[Generating Node Certificates].

[[enable-ssl]]
==== Enabling TLS in the Node Configuration

-See {ref}/enable-ssl.html[Enabling TLS on {es} Nodes].
+See {ref}/configuring-tls.html#enable-ssl[Enabling TLS on {es} Nodes].
@@ -82,6 +82,10 @@ public class PostCalendarEventsAction extends Action<PostCalendarEventsAction.Re
        public Request(String calendarId, List<ScheduledEvent> scheduledEvents) {
            this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName());
            this.scheduledEvents = ExceptionsHelper.requireNonNull(scheduledEvents, EVENTS.getPreferredName());
+
+            if (scheduledEvents.isEmpty()) {
+                throw ExceptionsHelper.badRequestException("At least 1 event is required");
+            }
        }

        public String getCalendarId() {

@@ -26,6 +26,7 @@ import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.xpack.ml.MlParserType;
import org.elasticsearch.xpack.ml.job.messages.Messages;
import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndexFields;
+import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.utils.MlStrings;
import org.elasticsearch.xpack.ml.utils.time.TimeUtils;
@@ -450,6 +451,23 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContentO
                + PROCESS_MEMORY_OVERHEAD.getBytes();
    }

+    /**
+     * Returns the timestamp before which data is not accepted by the job.
+     * This is the latest record timestamp minus the job latency.
+     * @param dataCounts the job data counts
+     * @return the timestamp before which data is not accepted by the job
+     */
+    public long earliestValidTimestamp(DataCounts dataCounts) {
+        long currentTime = 0;
+        Date latestRecordTimestamp = dataCounts.getLatestRecordTimeStamp();
+        if (latestRecordTimestamp != null) {
+            TimeValue latency = analysisConfig.getLatency();
+            long latencyMillis = latency == null ? 0 : latency.millis();
+            currentTime = latestRecordTimestamp.getTime() - latencyMillis;
+        }
+        return currentTime;
+    }
+
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(jobId);
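
The new method's arithmetic is easy to sanity-check in isolation. A rough sketch
(values invented, not part of the commit):

[source,java]
--------------------------------------------------
// A job with 30s latency whose latest record arrived at t = 100,000 ms
// rejects anything with a timestamp before t = 70,000 ms.
long latestRecordMs = 100_000L;
long latencyMs = 30_000L; // analysisConfig.getLatency() in the real code
long earliestValid = latestRecordMs - latencyMs;
assert earliestValid == 70_000L;
--------------------------------------------------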
@@ -51,6 +51,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;

@@ -369,10 +370,7 @@ public class JobProvider {
        ActionListener<AutodetectParams.Builder> getScheduledEventsListener = ActionListener.wrap(
                paramsBuilder -> {
                    ScheduledEventsQueryBuilder scheduledEventsQueryBuilder = new ScheduledEventsQueryBuilder();
-                    Date lastestRecordTime = paramsBuilder.getDataCounts().getLatestRecordTimeStamp();
-                    if (lastestRecordTime != null) {
-                        scheduledEventsQueryBuilder.start(Long.toString(lastestRecordTime.getTime()));
-                    }
+                    scheduledEventsQueryBuilder.start(job.earliestValidTimestamp(paramsBuilder.getDataCounts()));
                    scheduledEventsForJob(jobId, job.getGroups(), scheduledEventsQueryBuilder, ActionListener.wrap(
                            events -> {
                                paramsBuilder.setScheduledEvents(events.results());

@@ -1207,7 +1205,11 @@ public class JobProvider {

                    @Override
                    public void onFailure(Exception e) {
-                        listener.onFailure(e);
+                        if (e instanceof IndexNotFoundException) {
+                            listener.onFailure(new ResourceNotFoundException("No calendar with id [" + calendarId + "]"));
+                        } else {
+                            listener.onFailure(e);
+                        }
                    }
                },
                client::get);

@@ -41,6 +41,11 @@ public class ScheduledEventsQueryBuilder {
        return this;
    }

+    public ScheduledEventsQueryBuilder start(long start) {
+        this.start = Long.toString(start);
+        return this;
+    }
+
    public ScheduledEventsQueryBuilder end(String end) {
        this.end = end;
        return this;
@@ -5,7 +5,7 @@
 */
package org.elasticsearch.xpack.watcher.transform.chain;

-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.watcher.transform.ExecutableTransform;

@@ -21,7 +21,7 @@ public final class ChainTransformFactory extends TransformFactory<ChainTransform
    private final TransformRegistry registry;

    public ChainTransformFactory(Settings settings, TransformRegistry registry) {
-        super(Loggers.getLogger(ExecutableChainTransform.class, settings));
+        super(ServerLoggers.getLogger(ExecutableChainTransform.class, settings));
        this.registry = registry;
    }

@@ -14,7 +14,6 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.ml.action.DeleteExpiredDataAction;
import org.joda.time.DateTime;

@@ -27,7 +26,6 @@ import java.util.function.Supplier;

import static org.elasticsearch.xpack.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.ClientHelper.executeAsyncWithOrigin;
-import static org.elasticsearch.xpack.ClientHelper.stashWithOrigin;

/**
 * A service that runs once a day and triggers maintenance tasks.

@@ -88,8 +88,7 @@ public class TransportPostCalendarEventsAction extends HandledTransportAction<Po

                    @Override
                    public void onFailure(Exception e) {
-                        listener.onFailure(
-                                ExceptionsHelper.serverError("Error indexing event", e));
+                        listener.onFailure(ExceptionsHelper.serverError("Error indexing event", e));
                    }
                });
        },

@@ -28,7 +28,6 @@ import org.elasticsearch.xpack.ml.MLMetadataField;
import org.elasticsearch.xpack.ml.MlMetaIndex;
import org.elasticsearch.xpack.ml.MlMetadata;
import org.elasticsearch.xpack.ml.calendars.Calendar;
-import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.utils.ExceptionsHelper;

import java.io.IOException;

@@ -43,18 +42,16 @@ public class TransportPutCalendarAction extends HandledTransportAction<PutCalend

    private final Client client;
    private final ClusterService clusterService;
-    private final JobManager jobManager;

    @Inject
    public TransportPutCalendarAction(Settings settings, ThreadPool threadPool,
                                      TransportService transportService, ActionFilters actionFilters,
                                      IndexNameExpressionResolver indexNameExpressionResolver,
-                                      Client client, ClusterService clusterService, JobManager jobManager) {
+                                      Client client, ClusterService clusterService) {
        super(settings, PutCalendarAction.NAME, threadPool, transportService, actionFilters,
                indexNameExpressionResolver, PutCalendarAction.Request::new);
        this.client = client;
        this.clusterService = clusterService;
-        this.jobManager = jobManager;
    }

    @Override

@@ -81,7 +78,6 @@ public class TransportPutCalendarAction extends HandledTransportAction<PutCalend
                new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse indexResponse) {
-                        jobManager.updateProcessOnCalendarChanged(calendar.getJobIds());
                        listener.onResponse(new PutCalendarAction.Response(calendar));
                    }

@@ -52,6 +52,7 @@ import org.elasticsearch.xpack.persistent.PersistentTasksCustomMetaData;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
+import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;

@@ -132,6 +133,14 @@ public class JobManager extends AbstractComponent {
        return job;
    }

+    private Set<String> expandJobIds(String expression, boolean allowNoJobs, ClusterState clusterState) {
+        MlMetadata mlMetadata = clusterState.getMetaData().custom(MLMetadataField.TYPE);
+        if (mlMetadata == null) {
+            mlMetadata = MlMetadata.EMPTY_METADATA;
+        }
+        return mlMetadata.expandJobIds(expression, allowNoJobs);
+    }
+
    /**
     * Get the jobs that match the given {@code expression}.
     * Note that when the {@code jobId} is {@link MetaData#ALL} all jobs are returned.

@@ -142,11 +151,8 @@ public class JobManager extends AbstractComponent {
     * @return A {@link QueryPage} containing the matching {@code Job}s
     */
    public QueryPage<Job> expandJobs(String expression, boolean allowNoJobs, ClusterState clusterState) {
+        Set<String> expandedJobIds = expandJobIds(expression, allowNoJobs, clusterState);
        MlMetadata mlMetadata = clusterState.getMetaData().custom(MLMetadataField.TYPE);
-        if (mlMetadata == null) {
-            mlMetadata = MlMetadata.EMPTY_METADATA;
-        }
-        Set<String> expandedJobIds = mlMetadata.expandJobIds(expression, allowNoJobs);
        List<Job> jobs = new ArrayList<>();
        for (String expandedJobId : expandedJobIds) {
            jobs.add(mlMetadata.getJobs().get(expandedJobId));

@@ -335,7 +341,9 @@ public class JobManager extends AbstractComponent {

    public void updateProcessOnCalendarChanged(List<String> calendarJobIds) {
        ClusterState clusterState = clusterService.state();
-        for (String jobId : calendarJobIds) {
+        Set<String> expandedJobIds = new HashSet<>();
+        calendarJobIds.stream().forEach(jobId -> expandedJobIds.addAll(expandJobIds(jobId, true, clusterState)));
+        for (String jobId : expandedJobIds) {
            if (isJobOpen(clusterState, jobId)) {
                updateJobProcessNotifier.submitJobUpdate(UpdateParams.scheduledEventsUpdate(jobId));
            }

@@ -60,7 +60,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.Collections;
-import java.util.Date;
import java.util.Iterator;
import java.util.List;

@@ -280,7 +279,8 @@ public class AutodetectProcessManager extends AbstractComponent {

        if (updateParams.isUpdateScheduledEvents()) {
            Job job = jobManager.getJobOrThrowIfUnknown(jobTask.getJobId());
-            ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(Long.toString(new Date().getTime()));
+            DataCounts dataCounts = getStatistics(jobTask).get().v1();
+            ScheduledEventsQueryBuilder query = new ScheduledEventsQueryBuilder().start(job.earliestValidTimestamp(dataCounts));
            jobProvider.scheduledEventsForJob(jobTask.getJobId(), job.getGroups(), query, eventsListener);
        } else {
            eventsListener.onResponse(null);

@@ -29,7 +29,6 @@ import org.elasticsearch.xpack.ml.job.results.Bucket;
import org.elasticsearch.xpack.ml.job.results.CategoryDefinition;
import org.elasticsearch.xpack.ml.job.results.Forecast;
import org.elasticsearch.xpack.ml.job.results.ForecastRequestStats;
-import org.elasticsearch.xpack.ml.job.results.ForecastRequestStats.ForecastRequestStatus;
import org.elasticsearch.xpack.ml.job.results.Influencer;
import org.elasticsearch.xpack.ml.job.results.ModelPlot;
@ -7,7 +7,7 @@ package org.elasticsearch.xpack.security;
|
|||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext;
|
||||
|
@ -34,7 +34,7 @@ public class SecurityContext {
|
|||
* and {@link #getAuthentication()} will always return null.
|
||||
*/
|
||||
public SecurityContext(Settings settings, ThreadContext threadContext) {
|
||||
this.logger = Loggers.getLogger(getClass(), settings);
|
||||
this.logger = ServerLoggers.getLogger(getClass(), settings);
|
||||
this.threadContext = threadContext;
|
||||
this.nodeName = Node.NODE_NAME_SETTING.get(settings);
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ import org.elasticsearch.action.ActionListener;
|
|||
import org.elasticsearch.action.bulk.BulkItemResponse;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
|
@ -43,7 +43,7 @@ final class ExpiredTokenRemover extends AbstractRunnable {
|
|||
|
||||
ExpiredTokenRemover(Settings settings, Client client) {
|
||||
this.client = client;
|
||||
this.logger = Loggers.getLogger(getClass(), settings);
|
||||
this.logger = ServerLoggers.getLogger(getClass(), settings);
|
||||
this.timeout = TokenService.DELETE_TIMEOUT.get(settings);
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
package org.elasticsearch.xpack.security.authc;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.env.Environment;
|
||||
|
@ -60,7 +60,7 @@ public class RealmConfig {
|
|||
}
|
||||
|
||||
public Logger logger(Class clazz) {
|
||||
return Loggers.getLogger(clazz, globalSettings);
|
||||
return ServerLoggers.getLogger(clazz, globalSettings);
|
||||
}
|
||||
|
||||
public Environment env() {
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.elasticsearch.common.Nullable;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.logging.ESLoggerFactory;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.SecureString;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
|
@ -357,7 +357,7 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand {
|
|||
*/
|
||||
static Logger getTerminalLogger(final Terminal terminal) {
|
||||
final Logger logger = ESLoggerFactory.getLogger(ESNativeRealmMigrateTool.class);
|
||||
Loggers.setLevel(logger, Level.ALL);
|
||||
ServerLoggers.setLevel(logger, Level.ALL);
|
||||
|
||||
// create appender
|
||||
final Appender appender = new AbstractAppender(ESNativeRealmMigrateTool.class.getName(), null,
|
||||
|
@ -384,8 +384,8 @@ public class ESNativeRealmMigrateTool extends LoggingAwareMultiCommand {
|
|||
final Configuration config = ctx.getConfiguration();
|
||||
final LoggerConfig loggerConfig = config.getLoggerConfig(ESNativeRealmMigrateTool.class.getName());
|
||||
loggerConfig.setParent(null);
|
||||
loggerConfig.getAppenders().forEach((s, a) -> Loggers.removeAppender(logger, a));
|
||||
Loggers.addAppender(logger, appender);
|
||||
loggerConfig.getAppenders().forEach((s, a) -> ServerLoggers.removeAppender(logger, a));
|
||||
ServerLoggers.addAppender(logger, appender);
|
||||
return logger;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@ import org.elasticsearch.ElasticsearchParseException;
|
|||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.logging.LoggerMessageFormat;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
|
@ -98,7 +98,7 @@ public class SecurityIndexSearcherWrapper extends IndexSearcherWrapper {
|
|||
BitsetFilterCache bitsetFilterCache, ThreadContext threadContext, XPackLicenseState licenseState,
|
||||
ScriptService scriptService) {
|
||||
this.scriptService = scriptService;
|
||||
this.logger = Loggers.getLogger(getClass(), indexSettings.getSettings());
|
||||
this.logger = ServerLoggers.getLogger(getClass(), indexSettings.getSettings());
|
||||
this.queryShardContextProvider = queryShardContextProvider;
|
||||
this.bitsetFilterCache = bitsetFilterCache;
|
||||
this.threadContext = threadContext;
|
||||
|
|
|
@ -10,7 +10,7 @@ import io.netty.handler.ipfilter.IpFilterRuleType;
|
|||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.lucene.util.SetOnce;
|
||||
import org.elasticsearch.common.collect.MapBuilder;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.ClusterSettings;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
|
@ -117,7 +117,7 @@ public class IPFilter {
|
|||
|
||||
public IPFilter(final Settings settings, AuditTrailService auditTrail, ClusterSettings clusterSettings,
|
||||
XPackLicenseState licenseState) {
|
||||
this.logger = Loggers.getLogger(getClass(), settings);
|
||||
this.logger = ServerLoggers.getLogger(getClass(), settings);
|
||||
this.auditTrail = auditTrail;
|
||||
this.licenseState = licenseState;
|
||||
this.alwaysAllowBoundAddresses = ALLOW_BOUND_ADDRESSES_SETTING.get(settings);
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.bouncycastle.asn1.ASN1Sequence;
|
|||
import org.bouncycastle.asn1.ASN1String;
|
||||
import org.bouncycastle.asn1.ASN1TaggedObject;
|
||||
import org.bouncycastle.asn1.DERTaggedObject;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
/**
|
||||
|
@ -46,7 +46,7 @@ public final class RestrictedTrustManager extends X509ExtendedTrustManager {
|
|||
private final int SAN_CODE_OTHERNAME = 0;
|
||||
|
||||
public RestrictedTrustManager(Settings settings, X509ExtendedTrustManager delegate, CertificateTrustRestrictions restrictions) {
|
||||
this.logger = Loggers.getLogger(getClass(), settings);
|
||||
this.logger = ServerLoggers.getLogger(getClass(), settings);
|
||||
this.delegate = delegate;
|
||||
this.trustRestrictions = restrictions;
|
||||
logger.debug("Configured with trust restrictions: [{}]", restrictions);
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.watcher.actions.email;
|
||||
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
|
@ -25,7 +25,7 @@ public class EmailActionFactory extends ActionFactory {
|
|||
|
||||
public EmailActionFactory(Settings settings, EmailService emailService, TextTemplateEngine templateEngine,
|
||||
EmailAttachmentsParser emailAttachmentsParser) {
|
||||
super(Loggers.getLogger(ExecutableEmailAction.class, settings));
|
||||
super(ServerLoggers.getLogger(ExecutableEmailAction.class, settings));
|
||||
this.emailService = emailService;
|
||||
this.templateEngine = templateEngine;
|
||||
this.htmlSanitizer = new HtmlSanitizer(settings);
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.watcher.actions.hipchat;
|
||||
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
|
@ -21,7 +21,7 @@ public class HipChatActionFactory extends ActionFactory {
|
|||
private final HipChatService hipchatService;
|
||||
|
||||
public HipChatActionFactory(Settings settings, TextTemplateEngine templateEngine, HipChatService hipchatService) {
|
||||
super(Loggers.getLogger(ExecutableHipChatAction.class, settings));
|
||||
super(ServerLoggers.getLogger(ExecutableHipChatAction.class, settings));
|
||||
this.templateEngine = templateEngine;
|
||||
this.hipchatService = hipchatService;
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
package org.elasticsearch.xpack.watcher.actions.index;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -21,7 +21,7 @@ public class IndexActionFactory extends ActionFactory {
|
|||
private final TimeValue bulkDefaultTimeout;
|
||||
|
||||
public IndexActionFactory(Settings settings, Client client) {
|
||||
super(Loggers.getLogger(IndexActionFactory.class, settings));
|
||||
super(ServerLoggers.getLogger(IndexActionFactory.class, settings));
|
||||
this.client = client;
|
||||
this.indexDefaultTimeout = settings.getAsTime("xpack.watcher.actions.index.default_timeout", TimeValue.timeValueSeconds(30));
|
||||
this.bulkDefaultTimeout = settings.getAsTime("xpack.watcher.actions.bulk.default_timeout", TimeValue.timeValueMinutes(1));
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.watcher.actions.jira;
|
||||
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
|
@ -20,7 +20,7 @@ public class JiraActionFactory extends ActionFactory {
|
|||
private final JiraService jiraService;
|
||||
|
||||
public JiraActionFactory(Settings settings, TextTemplateEngine templateEngine, JiraService jiraService) {
|
||||
super(Loggers.getLogger(ExecutableJiraAction.class, settings));
|
||||
super(ServerLoggers.getLogger(ExecutableJiraAction.class, settings));
|
||||
this.templateEngine = templateEngine;
|
||||
this.jiraService = jiraService;
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
package org.elasticsearch.xpack.watcher.actions.logging;
|
||||
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
import org.elasticsearch.xpack.watcher.actions.Action;
|
||||
|
@ -24,7 +24,7 @@ public class ExecutableLoggingAction extends ExecutableAction<LoggingAction> {
|
|||
|
||||
public ExecutableLoggingAction(LoggingAction action, Logger logger, Settings settings, TextTemplateEngine templateEngine) {
|
||||
super(action, logger);
|
||||
this.textLogger = action.category != null ? Loggers.getLogger(action.category, settings) : logger;
|
||||
this.textLogger = action.category != null ? ServerLoggers.getLogger(action.category, settings) : logger;
|
||||
this.templateEngine = templateEngine;
|
||||
}
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.watcher.actions.logging;
|
||||
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
|
@ -19,7 +19,7 @@ public class LoggingActionFactory extends ActionFactory {
|
|||
private final TextTemplateEngine templateEngine;
|
||||
|
||||
public LoggingActionFactory(Settings settings, TextTemplateEngine templateEngine) {
|
||||
super(Loggers.getLogger(ExecutableLoggingAction.class, settings));
|
||||
super(ServerLoggers.getLogger(ExecutableLoggingAction.class, settings));
|
||||
this.settings = settings;
|
||||
this.templateEngine = templateEngine;
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.watcher.actions.pagerduty;
|
||||
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
|
@ -20,7 +20,7 @@ public class PagerDutyActionFactory extends ActionFactory {
|
|||
private final PagerDutyService pagerDutyService;
|
||||
|
||||
public PagerDutyActionFactory(Settings settings, TextTemplateEngine templateEngine, PagerDutyService pagerDutyService) {
|
||||
super(Loggers.getLogger(ExecutablePagerDutyAction.class, settings));
|
||||
super(ServerLoggers.getLogger(ExecutablePagerDutyAction.class, settings));
|
||||
this.templateEngine = templateEngine;
|
||||
this.pagerDutyService = pagerDutyService;
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.watcher.actions.slack;
|
||||
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
|
@ -19,7 +19,7 @@ public class SlackActionFactory extends ActionFactory {
|
|||
private final SlackService slackService;
|
||||
|
||||
public SlackActionFactory(Settings settings, TextTemplateEngine templateEngine, SlackService slackService) {
|
||||
super(Loggers.getLogger(ExecutableSlackAction.class, settings));
|
||||
super(ServerLoggers.getLogger(ExecutableSlackAction.class, settings));
|
||||
this.templateEngine = templateEngine;
|
||||
this.slackService = slackService;
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.watcher.actions.webhook;
|
||||
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
|
||||
|
@ -24,7 +24,7 @@ public class WebhookActionFactory extends ActionFactory {
|
|||
public WebhookActionFactory(Settings settings, HttpClient httpClient, HttpRequestTemplate.Parser requestTemplateParser,
|
||||
TextTemplateEngine templateEngine) {
|
||||
|
||||
super(Loggers.getLogger(ExecutableWebhookAction.class, settings));
|
||||
super(ServerLoggers.getLogger(ExecutableWebhookAction.class, settings));
|
||||
this.httpClient = httpClient;
|
||||
this.requestTemplateParser = requestTemplateParser;
|
||||
this.templateEngine = templateEngine;
|
||||
|
|
|
@ -8,7 +8,7 @@ package org.elasticsearch.xpack.watcher.execution;
|
|||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||
import org.apache.logging.log4j.util.Supplier;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.logging.ServerLoggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
|
||||
|
||||
|
@ -22,7 +22,7 @@ public class AsyncTriggerEventConsumer implements Consumer<Iterable<TriggerEvent
|
|||
private final ExecutionService executionService;
|
||||
|
||||
public AsyncTriggerEventConsumer(Settings settings, ExecutionService executionService) {
|
||||
this.logger = Loggers.getLogger(SyncTriggerEventConsumer.class, settings);
|
||||
this.logger = ServerLoggers.getLogger(SyncTriggerEventConsumer.class, settings);
|
||||
this.executionService = executionService;
|
||||
}
|
||||
|
||||
|
|
|
@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.watcher.execution;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;

@@ -22,7 +22,7 @@ public class SyncTriggerEventConsumer implements Consumer<Iterable<TriggerEvent>
     private final Logger logger;

     public SyncTriggerEventConsumer(Settings settings, ExecutionService executionService) {
-        this.logger = Loggers.getLogger(SyncTriggerEventConsumer.class, settings);
+        this.logger = ServerLoggers.getLogger(SyncTriggerEventConsumer.class, settings);
         this.executionService = executionService;
     }

@@ -6,7 +6,7 @@
 package org.elasticsearch.xpack.watcher.input.chain;

 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.watcher.input.ExecutableInput;
@@ -23,7 +23,7 @@ public class ChainInputFactory extends InputFactory<ChainInput, ChainInput.Resul
     private final InputRegistry inputRegistry;

     public ChainInputFactory(Settings settings, InputRegistry inputRegistry) {
-        super(Loggers.getLogger(ExecutableChainInput.class, settings));
+        super(ServerLoggers.getLogger(ExecutableChainInput.class, settings));
         this.inputRegistry = inputRegistry;
     }

@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.xpack.watcher.input.http;

-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.watcher.common.http.HttpClient;
@@ -23,7 +23,7 @@ public final class HttpInputFactory extends InputFactory<HttpInput, HttpInput.Re

     public HttpInputFactory(Settings settings, HttpClient httpClient, TextTemplateEngine templateEngine,
                             HttpRequestTemplate.Parser requestTemplateParser) {
-        super(Loggers.getLogger(ExecutableHttpInput.class, settings));
+        super(ServerLoggers.getLogger(ExecutableHttpInput.class, settings));
         this.templateEngine = templateEngine;
         this.httpClient = httpClient;
         this.requestTemplateParser = requestTemplateParser;
@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.xpack.watcher.input.none;

-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.watcher.input.InputFactory;
@@ -15,7 +15,7 @@ import java.io.IOException;
 public class NoneInputFactory extends InputFactory<NoneInput, NoneInput.Result, ExecutableNoneInput> {

     public NoneInputFactory(Settings settings) {
-        super(Loggers.getLogger(ExecutableNoneInput.class, settings));
+        super(ServerLoggers.getLogger(ExecutableNoneInput.class, settings));
     }

     @Override
@@ -6,7 +6,7 @@
 package org.elasticsearch.xpack.watcher.input.search;

 import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -25,7 +25,7 @@ public class SearchInputFactory extends InputFactory<SearchInput, SearchInput.Re

     public SearchInputFactory(Settings settings, Client client, NamedXContentRegistry xContentRegistry,
                               ScriptService scriptService) {
-        super(Loggers.getLogger(ExecutableSearchInput.class, settings));
+        super(ServerLoggers.getLogger(ExecutableSearchInput.class, settings));
         this.client = client;
         this.defaultTimeout = settings.getAsTime("xpack.watcher.input.search.default_timeout", TimeValue.timeValueMinutes(1));
         this.searchTemplateService = new WatcherSearchTemplateService(settings, scriptService, xContentRegistry);
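The `SearchInputFactory` constructor above also shows how Watcher resolves its search timeout: `settings.getAsTime` returns the configured value of `xpack.watcher.input.search.default_timeout` if present, otherwise the supplied one-minute default. A hedged sketch of that lookup (the `30s` override is a hypothetical value, not part of the commit):

[source,java]
----------------------------------
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;

class DefaultTimeoutSketch {
    public static void main(String[] args) {
        // nothing configured: the fallback passed to getAsTime applies
        TimeValue fallback = Settings.EMPTY.getAsTime(
                "xpack.watcher.input.search.default_timeout", TimeValue.timeValueMinutes(1));
        System.out.println(fallback); // 1m

        // a configured value wins over the fallback (hypothetical 30s override)
        Settings custom = Settings.builder()
                .put("xpack.watcher.input.search.default_timeout", "30s")
                .build();
        System.out.println(custom.getAsTime(
                "xpack.watcher.input.search.default_timeout", TimeValue.timeValueMinutes(1))); // 30s
    }
}
----------------------------------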
@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.xpack.watcher.input.simple;

-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.xpack.watcher.input.InputFactory;
@@ -15,7 +15,7 @@ import java.io.IOException;
 public class SimpleInputFactory extends InputFactory<SimpleInput, SimpleInput.Result, ExecutableSimpleInput> {

     public SimpleInputFactory(Settings settings) {
-        super(Loggers.getLogger(ExecutableSimpleInput.class, settings));
+        super(ServerLoggers.getLogger(ExecutableSimpleInput.class, settings));
     }

     @Override
@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.xpack.watcher.input.transform;

-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParserUtils;
@@ -32,7 +32,7 @@ public final class TransformInputFactory extends InputFactory<TransformInput, Tr
     private final TransformRegistry transformRegistry;

     public TransformInputFactory(Settings settings, TransformRegistry transformRegistry) {
-        super(Loggers.getLogger(ExecutableTransformInput.class, settings));
+        super(ServerLoggers.getLogger(ExecutableTransformInput.class, settings));
         this.transformRegistry = transformRegistry;
     }

@@ -81,10 +81,15 @@ public class WatcherIndexTemplateRegistry extends AbstractComponent implements C
             return;
         }

+        // no master node, exit immediately
+        DiscoveryNode masterNode = event.state().getNodes().getMasterNode();
+        if (masterNode == null) {
+            return;
+        }
+
         // if this node is newer than the master node, we probably need to add the history template, which might be newer than the
         // history template the master node has, so we need potentially add new templates despite being not the master node
         DiscoveryNode localNode = event.state().getNodes().getLocalNode();
-        DiscoveryNode masterNode = event.state().getNodes().getMasterNode();
         boolean localNodeVersionAfterMaster = localNode.getVersion().after(masterNode.getVersion());

         if (event.localNodeMaster() || localNodeVersionAfterMaster) {
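The comments in this hunk carry the reasoning for the change: template installation must run not only on the elected master but also on a node whose version is strictly ahead of the master's, since that node may ship newer index templates; the new null check covers the window in which no master has been elected yet. Note that `Version.after` is a strict comparison, so on a version-homogeneous cluster only the master acts. A small sketch of that semantics (the version constants are illustrative):

[source,java]
----------------------------------
import org.elasticsearch.Version;

class VersionGuardSketch {
    public static void main(String[] args) {
        // strict comparison: a newer local node takes over template installation
        System.out.println(Version.V_6_0_0.after(Version.V_5_6_0));  // true
        // equal versions: false, so only the elected master installs templates
        System.out.println(Version.V_6_0_0.after(Version.V_6_0_0));  // false
    }
}
----------------------------------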
@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.xpack.watcher.transform.script;

-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.script.ScriptService;
@@ -18,7 +18,7 @@ public class ScriptTransformFactory extends TransformFactory<ScriptTransform, Sc
     private final ScriptService scriptService;

     public ScriptTransformFactory(Settings settings, ScriptService scriptService) {
-        super(Loggers.getLogger(ExecutableScriptTransform.class, settings));
+        super(ServerLoggers.getLogger(ExecutableScriptTransform.class, settings));
         this.scriptService = scriptService;
     }

@@ -6,7 +6,7 @@
 package org.elasticsearch.xpack.watcher.transform.search;

 import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -24,7 +24,7 @@ public class SearchTransformFactory extends TransformFactory<SearchTransform, Se
     private final WatcherSearchTemplateService searchTemplateService;

     public SearchTransformFactory(Settings settings, Client client, NamedXContentRegistry xContentRegistry, ScriptService scriptService) {
-        super(Loggers.getLogger(ExecutableSearchTransform.class, settings));
+        super(ServerLoggers.getLogger(ExecutableSearchTransform.class, settings));
         this.client = client;
         this.defaultTimeout = settings.getAsTime("xpack.watcher.transform.search.default_timeout", TimeValue.timeValueMinutes(1));
         this.searchTemplateService = new WatcherSearchTemplateService(settings, scriptService, xContentRegistry);
@@ -1,74 +1,37 @@
{
  "index_patterns": [ ".monitoring-beats-${monitoring.template.version}-*" ],
  "version": 7000001,
  "index_patterns": [
    ".monitoring-beats-${monitoring.template.version}-*"
  ],
  "settings": {
    "index.number_of_shards": 1,
    "index.number_of_replicas": 0,
    "index.auto_expand_replicas": "0-1",
    "index.codec": "best_compression",
    "index.format": 6,
    "index.codec": "best_compression"
    "index.number_of_replicas": 0,
    "index.number_of_shards": 1
  },
  "version": 7000001,
  "mappings": {
    "doc": {
      "dynamic": false,
      "properties": {
        "cluster_uuid": {
          "type": "keyword"
        },
        "timestamp": {
          "type": "date",
          "format": "date_time"
        },
        "interval_ms": {
          "type": "long"
        },
        "type": {
          "type": "keyword"
        },
        "source_node": {
          "properties": {
            "uuid": {
              "type": "keyword"
            },
            "host": {
              "type": "keyword"
            },
            "transport_address": {
              "type": "keyword"
            },
            "ip": {
              "type": "keyword"
            },
            "name": {
              "type": "keyword"
            }
          }
        },
        "beats_stats": {
          "properties": {
            "timestamp": {
              "type": "date",
              "format": "date_time"
            },
            "tags": {
              "type": "keyword"
            },
            "beat": {
              "properties": {
                "uuid": {
                "host": {
                  "type": "keyword"
                },
                "name": {
                  "type": "keyword"
                },
                "type": {
                  "type": "keyword"
                },
                "uuid": {
                  "type": "keyword"
                },
                "version": {
                  "type": "keyword"
                },
                "host": {
                  "type": "keyword"
                },
                "name": {
                  "type": "keyword"
                }
              }
            },
@@ -76,6 +39,41 @@
          "properties": {
            "beat": {
              "properties": {
                "cpu": {
                  "properties": {
                    "total": {
                      "properties": {
                        "norm": {
                          "properties": {
                            "pct": {
                              "type": "double"
                            }
                          }
                        },
                        "pct": {
                          "type": "double"
                        },
                        "value": {
                          "type": "double"
                        }
                      }
                    }
                  }
                },
                "info": {
                  "properties": {
                    "ephemeral_id": {
                      "type": "keyword"
                    },
                    "uptime": {
                      "properties": {
                        "ms": {
                          "type": "long"
                        }
                      }
                    }
                  }
                },
                "memstats": {
                  "properties": {
                    "gc_next": {
@@ -86,6 +84,9 @@
                    },
                    "memory_total": {
                      "type": "long"
                    },
                    "rss": {
                      "type": "long"
                    }
                  }
                }
@@ -126,6 +127,12 @@
                "batches": {
                  "type": "long"
                },
                "dropped": {
                  "type": "long"
                },
                "duplicates": {
                  "type": "long"
                },
                "failed": {
                  "type": "long"
                },
@@ -199,10 +206,100 @@
                  }
                }
              }
            },
            "system": {
              "properties": {
                "cpu": {
                  "properties": {
                    "cores": {
                      "type": "long"
                    },
                    "total": {
                      "properties": {
                        "norm": {
                          "properties": {
                            "pct": {
                              "type": "double"
                            }
                          }
                        },
                        "pct": {
                          "type": "double"
                        }
                      }
                    }
                  }
                },
                "load": {
                  "properties": {
                    "1": {
                      "type": "double"
                    },
                    "15": {
                      "type": "double"
                    },
                    "5": {
                      "type": "double"
                    },
                    "norm": {
                      "properties": {
                        "1": {
                          "type": "double"
                        },
                        "15": {
                          "type": "double"
                        },
                        "5": {
                          "type": "double"
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      },
        "tags": {
          "type": "keyword"
        },
        "timestamp": {
          "format": "date_time",
          "type": "date"
        }
      }
    },
    "cluster_uuid": {
      "type": "keyword"
    },
    "interval_ms": {
      "type": "long"
    },
    "source_node": {
      "properties": {
        "host": {
          "type": "keyword"
        },
        "ip": {
          "type": "keyword"
        },
        "name": {
          "type": "keyword"
        },
        "transport_address": {
          "type": "keyword"
        },
        "uuid": {
          "type": "keyword"
        }
      }
    },
    "timestamp": {
      "format": "date_time",
      "type": "date"
    },
    "type": {
      "type": "keyword"
    }
  }
}
@@ -266,6 +266,45 @@ public class JobManagerTests extends ESTestCase {
         assertThat(capturedUpdateParams.get(1).isUpdateScheduledEvents(), is(true));
     }

+    public void testUpdateProcessOnCalendarChanged_GivenGroups() {
+        Job.Builder job1 = buildJobBuilder("job-1");
+        job1.setGroups(Collections.singletonList("group-1"));
+        Job.Builder job2 = buildJobBuilder("job-2");
+        job2.setGroups(Collections.singletonList("group-1"));
+        Job.Builder job3 = buildJobBuilder("job-3");
+
+        MlMetadata.Builder mlMetadata = new MlMetadata.Builder();
+        mlMetadata.putJob(job1.build(), false);
+        mlMetadata.putJob(job2.build(), false);
+        mlMetadata.putJob(job3.build(), false);
+
+        PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder();
+        addJobTask(job1.getId(), "node_id", JobState.OPENED, tasksBuilder);
+        addJobTask(job2.getId(), "node_id", JobState.OPENED, tasksBuilder);
+        addJobTask(job3.getId(), "node_id", JobState.OPENED, tasksBuilder);
+
+        ClusterState clusterState = ClusterState.builder(new ClusterName("_name"))
+                .metaData(MetaData.builder()
+                        .putCustom(PersistentTasksCustomMetaData.TYPE, tasksBuilder.build())
+                        .putCustom(MLMetadataField.TYPE, mlMetadata.build()))
+                .build();
+        when(clusterService.state()).thenReturn(clusterState);
+
+        JobManager jobManager = createJobManager();
+
+        jobManager.updateProcessOnCalendarChanged(Collections.singletonList("group-1"));
+
+        ArgumentCaptor<UpdateParams> updateParamsCaptor = ArgumentCaptor.forClass(UpdateParams.class);
+        verify(updateJobProcessNotifier, times(2)).submitJobUpdate(updateParamsCaptor.capture());
+
+        List<UpdateParams> capturedUpdateParams = updateParamsCaptor.getAllValues();
+        assertThat(capturedUpdateParams.size(), equalTo(2));
+        assertThat(capturedUpdateParams.get(0).getJobId(), equalTo(job1.getId()));
+        assertThat(capturedUpdateParams.get(0).isUpdateScheduledEvents(), is(true));
+        assertThat(capturedUpdateParams.get(1).getJobId(), equalTo(job2.getId()));
+        assertThat(capturedUpdateParams.get(1).isUpdateScheduledEvents(), is(true));
+    }
+
     private Job.Builder createJob() {
         Detector.Builder d1 = new Detector.Builder("info_content", "domain");
         d1.setOverFieldName("client");
@@ -22,6 +22,7 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.ml.MachineLearningClientActionPlugin;
 import org.elasticsearch.xpack.ml.job.messages.Messages;
 import org.elasticsearch.xpack.ml.job.persistence.AnomalyDetectorsIndexFields;
+import org.elasticsearch.xpack.ml.job.process.autodetect.state.DataCounts;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -551,6 +552,30 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
                 builder.build().estimateMemoryFootprint());
     }

+    public void testEarliestValidTimestamp_GivenEmptyDataCounts() {
+        assertThat(createRandomizedJob().earliestValidTimestamp(new DataCounts("foo")), equalTo(0L));
+    }
+
+    public void testEarliestValidTimestamp_GivenDataCountsAndZeroLatency() {
+        Job.Builder builder = buildJobBuilder("foo");
+        DataCounts dataCounts = new DataCounts(builder.getId());
+        dataCounts.setLatestRecordTimeStamp(new Date(123456789L));
+
+        assertThat(builder.build().earliestValidTimestamp(dataCounts), equalTo(123456789L));
+    }
+
+    public void testEarliestValidTimestamp_GivenDataCountsAndLatency() {
+        Job.Builder builder = buildJobBuilder("foo");
+        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(builder.build().getAnalysisConfig());
+        analysisConfig.setLatency(TimeValue.timeValueMillis(1000L));
+        builder.setAnalysisConfig(analysisConfig);
+
+        DataCounts dataCounts = new DataCounts(builder.getId());
+        dataCounts.setLatestRecordTimeStamp(new Date(123456789L));
+
+        assertThat(builder.build().earliestValidTimestamp(dataCounts), equalTo(123455789L));
+    }
+
     public static Job.Builder buildJobBuilder(String id, Date date) {
         Job.Builder builder = new Job.Builder(id);
         builder.setCreateTime(date);
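The three tests added above pin down the contract of `Job.earliestValidTimestamp(DataCounts)`: with no records it returns 0, and otherwise it is the latest record timestamp minus the configured latency (123456789 − 1000 = 123455789 in the last test). A hedged sketch of the implied arithmetic; the real method lives on `Job` and reads the latency from its `AnalysisConfig`:

[source,java]
----------------------------------
class EarliestValidTimestampSketch {
    // assumption: this mirrors what the tests assert, not the actual Job source
    static long earliestValidTimestamp(Long latestRecordTimeMs, long latencyMs) {
        if (latestRecordTimeMs == null) {
            return 0L; // empty DataCounts: no records seen yet
        }
        return latestRecordTimeMs - latencyMs;
    }

    public static void main(String[] args) {
        System.out.println(earliestValidTimestamp(null, 0L));          // 0
        System.out.println(earliestValidTimestamp(123456789L, 0L));    // 123456789
        System.out.println(earliestValidTimestamp(123456789L, 1000L)); // 123455789
    }
}
----------------------------------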
@@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.process.logging;

 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.MockLogAppender;
@@ -206,15 +207,15 @@ public class CppLogMessageHandlerTests extends ESTestCase {
     private static void executeLoggingTest(InputStream is, MockLogAppender mockAppender, Level level, String jobId)
             throws IOException {
         Logger cppMessageLogger = Loggers.getLogger(CppLogMessageHandler.class);
-        Loggers.addAppender(cppMessageLogger, mockAppender);
+        ServerLoggers.addAppender(cppMessageLogger, mockAppender);

         Level oldLevel = cppMessageLogger.getLevel();
-        Loggers.setLevel(cppMessageLogger, level);
+        ServerLoggers.setLevel(cppMessageLogger, level);
         try (CppLogMessageHandler handler = new CppLogMessageHandler(jobId, is)) {
             handler.tailStream();
         } finally {
-            Loggers.removeAppender(cppMessageLogger, mockAppender);
-            Loggers.setLevel(cppMessageLogger, oldLevel);
+            ServerLoggers.removeAppender(cppMessageLogger, mockAppender);
+            ServerLoggers.setLevel(cppMessageLogger, oldLevel);
             mockAppender.stop();
         }

@@ -15,7 +15,7 @@ import org.apache.logging.log4j.core.config.Configuration;
 import org.apache.logging.log4j.core.config.LoggerConfig;
 import org.apache.logging.log4j.core.filter.RegexFilter;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;

 import java.util.ArrayList;
 import java.util.List;
@@ -26,10 +26,10 @@ public class CapturingLogger {
         final StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
         final String name = caller.getClassName() + "." + caller.getMethodName() + "." + level.toString();
         final Logger logger = ESLoggerFactory.getLogger(name);
-        Loggers.setLevel(logger, level);
+        ServerLoggers.setLevel(logger, level);
         final MockAppender appender = new MockAppender(name);
         appender.start();
-        Loggers.addAppender(logger, appender);
+        ServerLoggers.addAppender(logger, appender);
         return logger;
     }

@@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -908,10 +909,7 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
         } else {
             _unfilteredRoles = randomListFromLengthBetween(unfilteredPolicy, 1, unfilteredPolicy.size());
             // add roles from other filter policies
-            final List<String> otherRoles = new ArrayList<>();
-            for (int j = 0; j < randomIntBetween(1, 4); j++) {
-                otherRoles.add(FILTER_MARKER + randomAlphaOfLengthBetween(1, 4));
-            }
+            final List<String> otherRoles = randomNonEmptyListOfFilteredNames("other");
             _unfilteredRoles.addAll(randomListFromLengthBetween(otherRoles, 1, otherRoles.size()));
             settingsBuilder.putList("xpack.security.audit.logfile.events.ignore_filters.otherPolicy.roles", otherRoles);
         }
@@ -1161,7 +1159,7 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
                 Collections.emptyList());
             }
         }
-        // filtered roles are a subset of the roles of any policy
+        // filtered indices are a subset of the indices of any policy
         final List<String> filterPolicy = randomFrom(allFilteredIndices);
         final String[] filteredIndices = randomListFromLengthBetween(filterPolicy, 1, filterPolicy.size()).toArray(new String[0]);
         // unfiltered index sets either have indices distinct from any other in any
@@ -1177,10 +1175,7 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
         } else {
             _unfilteredIndices = randomListFromLengthBetween(unfilteredPolicy, 1, unfilteredPolicy.size());
             // add indices from other filter policies
-            final List<String> otherIndices = new ArrayList<>();
-            for (int j = 0; j < randomIntBetween(1, 4); j++) {
-                otherIndices.add(FILTER_MARKER + randomAlphaOfLengthBetween(1, 4));
-            }
+            final List<String> otherIndices = randomNonEmptyListOfFilteredNames("other");
             _unfilteredIndices.addAll(randomListFromLengthBetween(otherIndices, 1, otherIndices.size()));
             settingsBuilder.putList("xpack.security.audit.logfile.events.ignore_filters.otherPolicy.indices", otherIndices);
         }
@@ -1506,10 +1501,10 @@ public class LoggingAuditTrailFilterTests extends ESTestCase {
         return ans;
     }

-    private List<String> randomNonEmptyListOfFilteredNames() {
+    private List<String> randomNonEmptyListOfFilteredNames(String... namePrefix) {
         final List<String> filtered = new ArrayList<>(4);
         for (int i = 0; i < randomIntBetween(1, 4); i++) {
-            filtered.add(FILTER_MARKER + randomAlphaOfLengthBetween(1, 4));
+            filtered.add(FILTER_MARKER + Strings.arrayToCommaDelimitedString(namePrefix) + randomAlphaOfLengthBetween(1, 4));
         }
         return filtered;
     }
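The last hunk folds the duplicated inline loops into `randomNonEmptyListOfFilteredNames`, which now takes a varargs prefix joined via `Strings.arrayToCommaDelimitedString`. With no arguments the joined prefix is the empty string, so pre-existing call sites keep their old behavior, while the refactored sites pass "other" to mark names that belong to another filter policy. A small sketch of that joining behavior:

[source,java]
----------------------------------
import org.elasticsearch.common.Strings;

class VarargsPrefixSketch {
    public static void main(String[] args) {
        // no-arg call: an empty array joins to "", names are unchanged
        System.out.println("MARKER" + Strings.arrayToCommaDelimitedString(new String[0]) + "abc");
        // -> MARKERabc

        // refactored call sites pass "other", producing prefixed names
        System.out.println("MARKER" + Strings.arrayToCommaDelimitedString(new String[] {"other"}) + "abc");
        // -> MARKERotherabc
    }
}
----------------------------------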
@@ -128,6 +128,17 @@ public class WatcherIndexTemplateRegistryTests extends ESTestCase {
         verifyZeroInteractions(client);
     }

+    public void testThatMissingMasterNodeDoesNothing() {
+        DiscoveryNode localNode = new DiscoveryNode("node", ESTestCase.buildNewFakeTransportAddress(), Version.CURRENT);
+        DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").add(localNode).build();
+
+        ClusterChangedEvent event = createClusterChangedEvent(Arrays.asList(WatcherIndexTemplateRegistry.TRIGGERED_TEMPLATE_NAME,
+                WatcherIndexTemplateRegistry.WATCHES_TEMPLATE_NAME, ".watch-history-6"), nodes);
+        registry.clusterChanged(event);
+
+        verifyZeroInteractions(client);
+    }
+
     private ClusterChangedEvent createClusterChangedEvent(List<String> existingTemplateNames, DiscoveryNodes nodes) {
         ClusterChangedEvent event = mock(ClusterChangedEvent.class);
         when(event.localNodeMaster()).thenReturn(nodes.isLocalNodeElectedMaster());
@@ -6,7 +6,7 @@
 package org.elasticsearch.xpack.watcher.test;

 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.watcher.Watcher;
@@ -28,7 +28,7 @@ public class TimeWarpedWatcher extends Watcher {

     public TimeWarpedWatcher(Settings settings) {
         super(settings);
-        Logger logger = Loggers.getLogger(TimeWarpedWatcher.class, settings);
+        Logger logger = ServerLoggers.getLogger(TimeWarpedWatcher.class, settings);
         logger.info("using time warped watchers plugin");
     }

@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher.test.bench;

 import org.bouncycastle.operator.OperatorCreationException;
 import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.node.MockNode;
@@ -226,7 +226,7 @@ public class WatcherExecutorServiceBenchmark {

         public BenchmarkWatcher(Settings settings) {
             super(settings);
-            Loggers.getLogger(XPackBenchmarkPlugin.class, settings).info("using watcher benchmark plugin");
+            ServerLoggers.getLogger(XPackBenchmarkPlugin.class, settings).info("using watcher benchmark plugin");
         }

         @Override
@@ -148,6 +148,7 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
         assertThat(response.getWatchesCount(), equalTo(1L));
     }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/1915")
     public void testLoadExistingWatchesUponStartup() throws Exception {
         ensureWatcherStarted();
         stopWatcher();
@@ -226,6 +227,7 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
         assertSingleExecutionAndCompleteWatchHistory(numWatches, numRecords);
     }

+    @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3437")
     public void testTriggeredWatchLoading() throws Exception {
         createIndex("output");
         client().prepareIndex("my-index", "foo", "bar")
@@ -6,7 +6,7 @@
 package org.elasticsearch.xpack.watcher.trigger;

 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.logging.ServerLoggers;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -36,7 +36,7 @@ public class ScheduleTriggerEngineMock extends ScheduleTriggerEngine {

     public ScheduleTriggerEngineMock(Settings settings, ScheduleRegistry scheduleRegistry, Clock clock) {
         super(settings, scheduleRegistry, clock);
-        this.logger = Loggers.getLogger(ScheduleTriggerEngineMock.class, settings);
+        this.logger = ServerLoggers.getLogger(ScheduleTriggerEngineMock.class, settings);
     }

     @Override
@@ -92,6 +92,13 @@
           "job_ids": ["cal-job", "unknown-job"]
         }

+---
+"Test get calendar given missing":
+  - do:
+      catch: /No calendar with id \[unknown\]/
+      xpack.ml.get_calendars:
+        calendar_id: "unknown"
+
 ---
 "Test PageParams":
   - do:
@@ -584,3 +591,15 @@
   - length: { events: 2 }
   - match: { events.0.description: ski }
   - match: { events.1.description: snow }
+
+---
+"Test post calendar events given empty events":
+
+  - do:
+      catch: /At least 1 event is required/
+      xpack.ml.post_calendar_events:
+        calendar_id: "foo"
+        body: >
+          {
+            "events": []
+          }
@@ -20,8 +20,10 @@ integTestRunner {
           'ml/ml_classic_analyze/Test analyze API with an analyzer that does what we used to do in native code',
           // Remove tests that are expected to throw an exception, because we cannot then
           // know whether to expect an authorization exception or a validation exception
+          'ml/calendar_crud/Test get calendar given missing',
           'ml/calendar_crud/Test cannot create calendar with name _all',
           'ml/calendar_crud/Test PageParams with ID is invalid',
+          'ml/calendar_crud/Test post calendar events given empty events',
          'ml/custom_all_field/Test querying custom all field',
           'ml/datafeeds_crud/Test delete datafeed with missing id',
           'ml/datafeeds_crud/Test put datafeed referring to missing job_id',
@@ -7,6 +7,7 @@ package org.elasticsearch.smoketest;

 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
+import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.test.junit.annotations.TestLogging;
@@ -28,6 +29,7 @@ import static org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule.
 import static org.hamcrest.Matchers.is;

 @TestLogging("org.elasticsearch.client:TRACE,tracer:TRACE")
+@AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/2920")
 public class MonitoringWithWatcherRestIT extends ESRestTestCase {

     @After
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.smoketest;
+
+import org.elasticsearch.test.ESIntegTestCase;
+
+public class PreventFailingBuildIT extends ESIntegTestCase {
+
+    public void testSoThatTestsDoNotFail() {
+        // Noop
+
+        // This is required because if tests are not enabled, no
+        // tests will be run in the entire project and all tests will fail.
+    }
+}