[DOCS] Re-enable XPack doc testing (elastic/x-pack-elasticsearch#1146)
Adds a Gradle build for documentation testing, partially stolen from the x-pack meta repo and updated to work with the current setup. index.asciidoc was updated and enabled as a test, and various pre-existing tests appear to be working. A large number of tests are still ignored because their snippets lack proper // CONSOLE tags.

Original commit: elastic/x-pack-elasticsearch@1d596f0be3
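For context, a snippet is only picked up by the docs-test build once it carries the framework's tags. A minimal sketch of a converted snippet, reusing the `GET /_xpack` request from the info docs touched below and the tagging conventions visible throughout this commit:

[source,js]
----
GET /_xpack
----
// CONSOLE

Responses are checked with `// TESTRESPONSE` (optionally with `s/.../.../` substitutions for values that vary between runs), snippets that cannot run against the test cluster yet are marked `// TEST[skip:todo]`, and files that have not been converted at all are parked in the `expectedUnconvertedCandidates` list in the new docs/build.gradle below.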
This commit is contained in:
parent 73feee6317
commit d993926ba4

docs/build.gradle (new file, 194 lines)
@@ -0,0 +1,194 @@
import org.elasticsearch.gradle.test.NodeInfo

import java.nio.charset.StandardCharsets

apply plugin: 'elasticsearch.docs-test'

/* List of files that have snippets that probably should be converted to
 * `// CONSOLE` and `// TESTRESPONSE` but have yet to be converted. Try and
 * only remove entries from this list. When it is empty we'll remove it
 * entirely and have a party! There will be cake and everything.... */
buildRestTests.expectedUnconvertedCandidates = [
  'en/rest-api/security/users.asciidoc',
  'en/rest-api/watcher/put-watch.asciidoc',
  'en/rest-api/ml/post-data.asciidoc',
  'en/security/authentication/user-cache.asciidoc',
  'en/security/authorization/field-and-document-access-control.asciidoc',
  'en/security/authorization/run-as-privilege.asciidoc',
  'en/security/tribe-clients-integrations/beats.asciidoc',
  'en/security/tribe-clients-integrations/http.asciidoc',
  'en/security/tribe-clients-integrations/monitoring.asciidoc',
  'en/security/tribe-clients-integrations/cross-cluster.asciidoc',
  'en/security/authorization/custom-roles-provider.asciidoc',
  'en/watcher/actions/email.asciidoc',
  'en/watcher/actions/hipchat.asciidoc',
  'en/watcher/actions/index.asciidoc',
  'en/watcher/actions/logging.asciidoc',
  'en/watcher/actions/pagerduty.asciidoc',
  'en/watcher/actions/slack.asciidoc',
  'en/watcher/actions/jira.asciidoc',
  'en/watcher/actions/webhook.asciidoc',
  'en/watcher/condition/always.asciidoc',
  'en/watcher/condition/array-compare.asciidoc',
  'en/watcher/condition/compare.asciidoc',
  'en/watcher/condition/never.asciidoc',
  'en/watcher/condition/script.asciidoc',
  'en/watcher/customizing-watches.asciidoc',
  'en/watcher/example-watches/example-watch-meetupdata.asciidoc',
  'en/watcher/how-watcher-works.asciidoc',
  'en/watcher/input/chain.asciidoc',
  'en/watcher/input/http.asciidoc',
  'en/watcher/input/search.asciidoc',
  'en/watcher/input/simple.asciidoc',
  'en/watcher/transform.asciidoc',
  'en/watcher/transform/chain.asciidoc',
  'en/watcher/transform/script.asciidoc',
  'en/watcher/transform/search.asciidoc',
  'en/watcher/trigger/schedule/cron.asciidoc',
  'en/watcher/trigger/schedule/daily.asciidoc',
  'en/watcher/trigger/schedule/hourly.asciidoc',
  'en/watcher/trigger/schedule/interval.asciidoc',
  'en/watcher/trigger/schedule/monthly.asciidoc',
  'en/watcher/trigger/schedule/weekly.asciidoc',
  'en/watcher/trigger/schedule/yearly.asciidoc',
  'en/watcher/troubleshooting.asciidoc',
  'en/ml/api-quickref.asciidoc',
  'en/rest-api/ml/close-job.asciidoc',
  'en/rest-api/ml/delete-datafeed.asciidoc',
  'en/rest-api/ml/delete-snapshot.asciidoc',
  'en/rest-api/ml/flush-job.asciidoc',
  'en/rest-api/ml/get-bucket.asciidoc',
  'en/rest-api/ml/get-category.asciidoc',
  'en/rest-api/ml/get-datafeed-stats.asciidoc',
  'en/rest-api/ml/get-job-stats.asciidoc',
  'en/rest-api/ml/get-record.asciidoc',
  'en/rest-api/ml/open-job.asciidoc',
  'en/rest-api/ml/preview-datafeed.asciidoc',
  'en/rest-api/ml/put-datafeed.asciidoc',
  'en/rest-api/ml/put-job.asciidoc',
  'en/rest-api/ml/start-datafeed.asciidoc',
  'en/rest-api/ml/stop-datafeed.asciidoc',
  'en/rest-api/ml/update-datafeed.asciidoc',
  'en/rest-api/ml/update-job.asciidoc',
  'en/rest-api/ml/update-snapshot.asciidoc',
  'en/rest-api/ml/validate-detector.asciidoc',
  'en/rest-api/ml/delete-job.asciidoc',
  'en/rest-api/ml/get-datafeed.asciidoc',
  'en/rest-api/ml/get-influencer.asciidoc',
  'en/rest-api/ml/get-job.asciidoc',
  'en/rest-api/ml/get-snapshot.asciidoc',
  'en/rest-api/ml/revert-snapshot.asciidoc',
  'en/rest-api/ml/validate-job.asciidoc',
  'en/rest-api/security/authenticate.asciidoc',
  'en/rest-api/watcher/stats.asciidoc',
  'en/security/authorization.asciidoc',
  'en/security/tribe-clients-integrations/logstash.asciidoc',
  'en/watcher/actions.asciidoc',
  'en/watcher/example-watches/watching-time-series-data.asciidoc',
]

dependencies {
  testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'runtime')
}

Closure waitWithAuth = { NodeInfo node, AntBuilder ant ->
  File tmpFile = new File(node.cwd, 'wait.success')
  // wait up to twenty seconds
  final long stopTime = System.currentTimeMillis() + 20000L;
  Exception lastException = null;
  while (System.currentTimeMillis() < stopTime) {
    lastException = null;
    // we use custom wait logic here as the elastic user is not available immediately and ant.get will fail when a 401 is returned
    HttpURLConnection httpURLConnection = null;
    try {
      httpURLConnection = (HttpURLConnection) new URL("http://${node.httpUri()}/_cluster/health").openConnection();
      httpURLConnection.setRequestProperty("Authorization", "Basic " +
          Base64.getEncoder().encodeToString("test_admin:changeme".getBytes(StandardCharsets.UTF_8)));
      httpURLConnection.setRequestMethod("GET");
      httpURLConnection.setConnectTimeout(1000);
      httpURLConnection.setReadTimeout(30000);
      httpURLConnection.connect();
      if (httpURLConnection.getResponseCode() == 200) {
        tmpFile.withWriter StandardCharsets.UTF_8.name(), {
          it.write(httpURLConnection.getInputStream().getText(StandardCharsets.UTF_8.name()))
        }
        break;
      }
    } catch (Exception e) {
      logger.debug("failed to call cluster health", e)
      lastException = e
    } finally {
      if (httpURLConnection != null) {
        httpURLConnection.disconnect();
      }
    }

    // did not start, so wait a bit before trying again
    Thread.sleep(500L);
  }
  if (tmpFile.exists() == false && lastException != null) {
    logger.error("final attempt of calling cluster health failed", lastException)
  }
  return tmpFile.exists()
}

integTestCluster {
  plugin ':x-pack-elasticsearch:plugin'
  setting 'script.inline', 'true'
  setupCommand 'setupTestAdmin',
      'bin/x-pack/users', 'useradd', 'test_admin', '-p', 'changeme', '-r', 'superuser'
  waitCondition = waitWithAuth
}

buildRestTests.docs = fileTree(projectDir) {
  // No snippets in here!
  exclude 'build.gradle'
  // That is where the snippets go, not where they come from!
  exclude 'build'
  // These files simply don't pass yet. We should figure out how to fix them.
  exclude 'en/rest-api/watcher/ack-watch.asciidoc'
  exclude 'en/watcher/reference/actions.asciidoc'
  exclude 'en/rest-api/graph/explore.asciidoc'
}

Map<String, String> setups = buildRestTests.setups
setups['my_inactive_watch'] = '''
  - do:
      xpack.watcher.put_watch:
        id: "my_watch"
        master_timeout: "40s"
        active: false
        body: >
          {
            "trigger": {
              "schedule": {
                "hourly": {
                  "minute": [ 0, 5 ]
                }
              }
            },
            "input": {
              "simple": {
                "payload": {
                  "send": "yes"
                }
              }
            },
            "condition": {
              "always": {}
            },
            "actions": {
              "test_index": {
                "index": {
                  "index": "test",
                  "doc_type": "test2"
                }
              }
            }
          }
  - match: { _id: "my_watch" }
'''
setups['my_active_watch'] = setups['my_inactive_watch'].replace(
    'active: false', 'active: true')
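The `setups` entries above register named fixtures that snippets can run against. Assuming the docs-test plugin follows the same `// TEST[setup:...]` convention as the core Elasticsearch docs build (an assumption; the referencing side is not part of this diff), a hypothetical Watcher snippet could use the inactive-watch fixture like this:

[source,js]
----
GET _xpack/watcher/watch/my_watch
----
// CONSOLE
// TEST[setup:my_inactive_watch]

The watch id `my_watch` matches the one created by the fixture's `xpack.watcher.put_watch` call, and `my_active_watch` provides the same watch with `active: true`.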
@@ -3,6 +3,7 @@

All {ml} endpoints have the following base:

[source,js]
----
/_xpack/ml/
----
@@ -29,49 +29,64 @@ The following example queries the info API:
------------------------------------------------------------
GET /_xpack
------------------------------------------------------------
// CONSOLE

Example response:

[source,js]
------------------------------------------------------------
{
-  "build": {
-    "hash": "2798b1a3ce779b3611bb53a0082d4d741e4d3168",
-    "timestamp": "2015-04-07T13:34:42Z"
+  "build" : {
+    "hash" : "2798b1a3ce779b3611bb53a0082d4d741e4d3168",
+    "date" : "2015-04-07T13:34:42Z"
  },
-  "license": {
-    "uid": "893361dc-9749-4997-93cb-802e3dofh7aa",
-    "type": "internal",
-    "mode": "platinum",
-    "status": "active",
-    "expiry_date": "2030-08-29T23:59:59.999Z",
-    "expiry_date_in_millis": 1914278399999
+  "license" : {
+    "uid" : "893361dc-9749-4997-93cb-802e3dofh7aa",
+    "type" : "trial",
+    "mode" : "trial",
+    "status" : "active",
+    "expiry_date_in_millis" : 1914278399999
  },
-  "features": {
-    "graph": {
-      "description": "Graph Data Exploration for the Elastic Stack",
-      "available": true,
-      "enabled": true
+  "features" : {
+    "graph" : {
+      "description" : "Graph Data Exploration for the Elastic Stack",
+      "available" : true,
+      "enabled" : true
    },
-    "monitoring": {
-      "description": "Monitoring for the Elastic Stack",
-      "available": true,
-      "enabled": true
+    "ml" : {
+      "description" : "Machine Learning for the Elastic Stack",
+      "available" : true,
+      "enabled" : true,
+      "native_code_info" : {
+        "version" : "6.0.0-alpha1-SNAPSHOT",
+        "build_hash" : "d081461967d61a"
+      }
    },
-    "security": {
-      "description": "Security for the Elastic Stack",
-      "available": true,
-      "enabled": true
+    "monitoring" : {
+      "description" : "Monitoring for the Elastic Stack",
+      "available" : true,
+      "enabled" : true
    },
-    "watcher": {
-      "description": "Alerting, Notification and Automation for the Elastic Stack",
-      "available": true,
-      "enabled": true
+    "security" : {
+      "description" : "Security for the Elastic Stack",
+      "available" : true,
+      "enabled" : true
    },
+    "watcher" : {
+      "description" : "Alerting, Notification and Automation for the Elastic Stack",
+      "available" : true,
+      "enabled" : true
    }
  },
-  "tagline": "You know, for X"
+  "tagline" : "You know, for X"
}
------------------------------------------------------------
+// TESTRESPONSE[s/"hash" : "2798b1a3ce779b3611bb53a0082d4d741e4d3168",/"hash" : "$body.build.hash",/]
+// TESTRESPONSE[s/"date" : "2015-04-07T13:34:42Z"/"date" : "$body.build.date"/]
+// TESTRESPONSE[s/"uid" : "893361dc-9749-4997-93cb-802e3dofh7aa",/"uid": "$body.license.uid",/]
+// TESTRESPONSE[s/"expiry_date_in_millis" : 1914278399999/"expiry_date_in_millis" : "$body.license.expiry_date_in_millis"/]
+// TESTRESPONSE[s/"version" : "6.0.0-alpha1-SNAPSHOT",/"version": "$body.features.ml.native_code_info.version",/]
+// TESTRESPONSE[s/"build_hash" : "d081461967d61a"/"build_hash": "$body.features.ml.native_code_info.build_hash"/]
+// So much s/// but at least we test that the layout is close to matching....

You can also control what information is returned using the `categories` and
`human` parameters.
@@ -82,6 +97,7 @@ The following example only returns the build and features information:
------------------------------------------------------------
GET /_xpack?categories=build,features
------------------------------------------------------------
// CONSOLE

The following example removes the descriptions from the response:

@@ -89,6 +105,7 @@ The following example removes the descriptions from the response:
------------------------------------------------------------
GET /_xpack?human=false
------------------------------------------------------------
// CONSOLE

include::security.asciidoc[]

@@ -69,6 +69,7 @@ POST _xpack/ml/anomaly_detectors/event_rate/_close
// TEST[skip:todo]

When the job is closed, you receive the following results:
[source,js]
----
{
  "closed": true

@@ -42,6 +42,7 @@ DELETE _xpack/ml/datafeeds/datafeed-it-ops
// TEST[skip:todo]

When the data feed is deleted, you receive the following results:
[source,js]
----
{
  "acknowledged": true

@@ -53,6 +53,7 @@ DELETE _xpack/ml/anomaly_detectors/event_rate
// TEST[skip:todo]

When the job is deleted, you receive the following results:
[source,js]
----
{
  "acknowledged": true

@@ -49,7 +49,7 @@ DELETE _xpack/ml/anomaly_detectors/farequote/model_snapshots/1491948163
// TEST[skip:todo]

When the snapshot is deleted, you receive the following results:

[source,js]
----
{
  "acknowledged": true

@@ -67,6 +67,7 @@ POST _xpack/ml/anomaly_detectors/farequote/_flush
// TEST[skip:todo]

When the operation succeeds, you receive the following results:
[source,js]
----
{
  "flushed": true

@@ -89,6 +89,7 @@ GET _xpack/ml/anomaly_detectors/it-ops-kpi/results/buckets

In this example, the API returns a single result that matches the specified
score and time constraints:
[source,js]
----
{
  "count": 1,

@@ -67,6 +67,7 @@ GET _xpack/ml/anomaly_detectors/it_ops_new_logs/results/categories
// TEST[skip:todo]

In this example, the API returns the following information for each category:
[source,js]
----
{
  "count": 11,

@@ -57,6 +57,7 @@ GET _xpack/ml/datafeeds/datafeed-farequote/_stats
// TEST[skip:todo]

The API returns the following results:
[source,js]
----
{
  "count": 1,

@@ -56,6 +56,7 @@ GET _xpack/ml/datafeeds/datafeed-it-ops-kpi
// TEST[skip:todo]

The API returns the following results:
[source,js]
----
{
  "count": 1,

@@ -85,6 +85,7 @@ GET _xpack/ml/anomaly_detectors/it_ops_new_kpi/results/influencers

In this example, the API returns the following information, sorted based on the
influencer score in descending order:
[source,js]
----
{
  "count": 28,

@@ -54,6 +54,7 @@ GET _xpack/ml/anomaly_detectors/farequote/_stats

In this example, the API returns a single result that matches the specified
score and time constraints:
[source,js]
----
{
  "count": 1,

@@ -53,6 +53,7 @@ GET _xpack/ml/anomaly_detectors/farequote

In this example, the API returns a single result that matches the specified
score and time constraints:
[source,js]
----
{
  "count": 1,

@@ -85,6 +85,7 @@ GET _xpack/ml/anomaly_detectors/it-ops-kpi/results/records

In this example, the API returns twelve results for the specified
time constraints:
[source,js]
----
{
  "count": 12,

@@ -85,6 +85,7 @@ GET _xpack/ml/anomaly_detectors/farequote/model_snapshots
// TEST[skip:todo]

In this example, the API provides a single result:
[source,js]
----
{
  "count": 1,

@@ -61,6 +61,7 @@ POST _xpack/ml/anomaly_detectors/event_rate/_open
// TEST[skip:todo]

When the job opens, you receive the following results:
[source,js]
----
{
  "opened": true

@@ -60,13 +60,13 @@ $ curl -s -H "Content-type: application/json"
-X POST http:\/\/localhost:9200/_xpack/ml/anomaly_detectors/it_ops_new_kpi/_data
--data-binary @it_ops_new_kpi.json
--------------------------------------------------
// CONSOLE
// TEST[skip:todo]

//TBD: Create example of how to post a small data example in Kibana?

When the data is sent, you receive information about the operational progress of the job.
For example:

[source,js]
----
{
  "job_id":"it_ops_new_kpi",

@@ -51,6 +51,7 @@ GET _xpack/ml/datafeeds/datafeed-farequote/_preview
// TEST[skip:todo]

The data that is returned for this example is as follows:
[source,js]
----
[
  {

@@ -89,6 +89,7 @@ PUT _xpack/ml/datafeeds/datafeed-it-ops-kpi
// TEST[skip:todo]

When the data feed is created, you receive the following results:
[source,js]
----
{
  "datafeed_id": "datafeed-it-ops-kpi",

@@ -89,6 +89,7 @@ PUT _xpack/ml/anomaly_detectors/it-ops-kpi
// TEST[skip:todo]

When the job is created, you receive the following results:
[source,js]
----
{
  "job_id": "it-ops-kpi",

@@ -101,6 +101,7 @@ _xpack/ml/anomaly_detectors/it_ops_new_kpi/model_snapshots/1491856080/_revert
// TEST[skip:todo]

When the operation is complete, you receive the following results:
[source,js]
----
{
  "acknowledged": true,

@@ -97,6 +97,7 @@ POST _xpack/ml/datafeeds/datafeed-it-ops-kpi/_start
// TEST[skip:todo]

When the job opens, you receive the following results:
[source,js]
----
{
  "started": true

@@ -53,6 +53,7 @@ POST _xpack/ml/datafeeds/datafeed-it-ops-kpi/_stop
// TEST[skip:todo]

When the data feed stops, you receive the following results:
[source,js]
----
{
  "stopped": true

@@ -114,6 +114,7 @@ POST _xpack/ml/datafeeds/datafeed-it-ops-kpi/_update
// TEST[skip:todo]

When the data feed is updated, you receive the following results:
[source,js]
----
{
  "datafeed_id": "datafeed-it-ops-kpi",

@@ -73,6 +73,7 @@ POST _xpack/ml/anomaly_detectors/it-ops-kpi/_update
// TEST[skip:todo]

When the job is updated, you receive the following results:
[source,js]
----
{
  "job_id": "it-ops-kpi",

@@ -66,6 +66,7 @@ _xpack/ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update
// TEST[skip:todo]

When the snapshot is updated, you receive the following results:
[source,js]
----
{
  "acknowledged": true,

@@ -51,6 +51,7 @@ POST _xpack/ml/anomaly_detectors/_validate/detector
// TEST[skip:todo]

When the validation completes, you receive the following results:
[source,js]
----
{
  "acknowledged": true

@@ -61,6 +61,7 @@ POST _xpack/ml/anomaly_detectors/_validate
// TEST[skip:todo]

When the validation is complete, you receive the following results:
[source,js]
----
{
  "acknowledged": true
@@ -0,0 +1,91 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.smoketest;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.junit.After;

import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.xpack.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;

public class XDocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
    private static final String USER_TOKEN = basicAuthHeaderValue("test_admin", new SecureString("changeme".toCharArray()));

    public XDocsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws Exception {
        return ESClientYamlSuiteTestCase.createParameters();
    }

    @Override
    protected void afterIfFailed(List<Throwable> errors) {
        super.afterIfFailed(errors);
        // the parameterized test name carries the generated snippet location after the '='
        String name = getTestName().split("=")[1];
        // drop the trailing '}' of the parameterized name
        name = name.substring(0, name.length() - 1);
        // turn "path/to/file/snippet" into "path/to/file.asciidoc:snippet"
        name = name.replaceAll("/([^/]+)$", ".asciidoc:$1");
        logger.error("This failing test was generated by documentation starting at {}. It may include many snippets. "
                + "See Elasticsearch's docs/README.asciidoc for an explanation of test generation.", name);
    }

    @Override
    protected boolean preserveTemplatesUponCompletion() {
        return true;
    }

    /**
     * All tests run as an administrative user but use <code>es-shield-runas-user</code> to become a less privileged user.
     */
    @Override
    protected Settings restClientSettings() {
        return Settings.builder()
                .put(ThreadContext.PREFIX + ".Authorization", USER_TOKEN)
                .build();
    }

    /**
     * Re-enables watcher after every test just in case any test disables it. One does.
     */
    @After
    public void reenableWatcher() throws Exception {
        getAdminExecutionContext().callApi("xpack.watcher.start", emptyMap(), emptyList(), emptyMap());
    }

    /**
     * Deletes users after every test just in case any test adds any.
     */
    @After
    public void deleteUsers() throws Exception {
        ClientYamlTestResponse response = getAdminExecutionContext().callApi("xpack.security.get_user", emptyMap(), emptyList(),
                emptyMap());
        @SuppressWarnings("unchecked")
        Map<String, Object> users = (Map<String, Object>) response.getBody();
        for (String user: users.keySet()) {
            Map<?, ?> metaDataMap = (Map<?, ?>) ((Map<?, ?>) users.get(user)).get("metadata");
            Boolean reserved = metaDataMap == null ? null : (Boolean) metaDataMap.get("_reserved");
            // only remove users that are not reserved (built-in) users
            if (reserved == null || reserved == false) {
                logger.warn("Deleting leftover user {}", user);
                getAdminExecutionContext().callApi("xpack.security.delete_user", singletonMap("username", user), emptyList(), emptyMap());
            }
        }
    }
}