[DOCS] Replace twitter dataset in index API docs (#60473) (#60510)

James Rodewig authored on 2020-07-31 09:51:47 -04:00; committed by GitHub
parent bd6b20a4ec
commit 4b12e69e8e
30 changed files with 110 additions and 112 deletions
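
The renames below are mechanical: requests against the old `twitter` example index now target `my-index-000001`, secondary example indices such as `kimchy` and `elasticsearch` become `my-index-000002`, dated examples like `log_2013_*` move to `log_2099_*`, and the docs test fixtures change from `setup:twitter` to `setup:my_index` (and `setup:huge_twitter` to `setup:my_index_huge`). For orientation, here is the before/after shape of a typical snippet (paraphrased from the update-settings hunk further down, not new material in the commit):

[source,console]
----
# Before
PUT /twitter/_settings
{ "index" : { "number_of_replicas" : 2 } }

# After
PUT /my-index-000001/_settings
{ "index" : { "number_of_replicas" : 2 } }
----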

@@ -10,9 +10,9 @@ include::{es-repo-dir}/glossary.asciidoc[tag=index-alias-desc]
[source,console]
----
-PUT /twitter/_alias/alias1
+PUT /my-index-000001/_alias/alias1
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[add-alias-api-request]]

@@ -12,8 +12,8 @@ include::{es-repo-dir}/glossary.asciidoc[tag=index-alias-desc]
----
HEAD /_alias/alias1
----
-// TEST[setup:twitter]
-// TEST[s/^/PUT twitter\/_alias\/alias1\n/]
+// TEST[setup:my_index]
+// TEST[s/^/PUT my-index-000001\/_alias\/alias1\n/]
[[alias-exists-api-request]]

@@ -13,11 +13,11 @@ include::{es-repo-dir}/glossary.asciidoc[tag=index-alias-desc]
POST /_aliases
{
"actions" : [
-{ "add" : { "index" : "twitter", "alias" : "alias1" } }
+{ "add" : { "index" : "my-index-000001", "alias" : "alias1" } }
]
}
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[indices-aliases-api-request]]

@@ -12,9 +12,9 @@ stream's backing indices.
[source,console]
--------------------------------------------------
-POST /twitter/_reload_search_analyzers
+POST /my-index-000001/_reload_search_analyzers
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[discrete]
[[indices-reload-analyzers-api-request]]

@@ -9,9 +9,9 @@ caches of the stream's backing indices.
[source,console]
----
-POST /twitter/_cache/clear
+POST /my-index-000001/_cache/clear
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[clear-cache-api-request]]
@@ -106,9 +106,9 @@ by setting the following query parameters to `true`:
[source,console]
----
-POST /twitter/_cache/clear?fielddata=true <1>
-POST /twitter/_cache/clear?query=true <2>
-POST /twitter/_cache/clear?request=true <3>
+POST /my-index-000001/_cache/clear?fielddata=true <1>
+POST /my-index-000001/_cache/clear?query=true <2>
+POST /my-index-000001/_cache/clear?request=true <3>
----
// TEST[continued]
@@ -126,7 +126,7 @@ use the `fields` query parameter.
[source,console]
----
-POST /twitter/_cache/clear?fields=foo,bar <1>
+POST /my-index-000001/_cache/clear?fields=foo,bar <1>
----
// TEST[continued]
@@ -138,9 +138,9 @@ POST /twitter/_cache/clear?fields=foo,bar <1>
[source,console]
----
-POST /kimchy,elasticsearch/_cache/clear
+POST /my-index-000001,my-index-000002/_cache/clear
----
-// TEST[s/^/PUT kimchy\nPUT elasticsearch\n/]
+// TEST[s/^/PUT my-index-000001\nPUT my-index-000002\n/]
[[clear-cache-api-all-ex]]

@@ -8,9 +8,9 @@ Clones an existing index.
[source,console]
--------------------------------------------------
-POST /twitter/_clone/cloned-twitter-index
+POST /my-index-000001/_clone/cloned-my-index-000001
--------------------------------------------------
-// TEST[s/^/PUT twitter\n{"settings":{"index.number_of_shards" : 5,"blocks.write":true}}\n/]
+// TEST[s/^/PUT my-index-000001\n{"settings":{"index.number_of_shards" : 5,"blocks.write":true}}\n/]
[[clone-index-api-request]]

@@ -8,9 +8,9 @@ Closes an index.
[source,console]
--------------------------------------------------
-POST /twitter/_close
+POST /my-index-000001/_close
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[close-index-api-request]]

@@ -8,7 +8,7 @@ Creates a new index.
[source,console]
--------------------------------------------------
-PUT /twitter
+PUT /my-index-000001
--------------------------------------------------
@@ -81,7 +81,7 @@ associated with it, defined in the body:
[source,console]
--------------------------------------------------
-PUT /twitter
+PUT /my-index-000001
{
"settings": {
"index": {
@@ -99,7 +99,7 @@ or more simplified
[source,console]
--------------------------------------------------
-PUT /twitter
+PUT /my-index-000001
{
"settings": {
"number_of_shards": 3,

@@ -38,7 +38,7 @@ The API returns the following response:
--------------------------------------------------
{
"dangling_indices": [
-"index_name": "twitter",
+"index_name": "my-index-000001",
"index_uuid": "zmM4e0JtBkeUjiHD-MihPQ",
"creation_date_millis": 1589414451372,
"node_ids": [

@@ -10,10 +10,10 @@ include::{es-repo-dir}/glossary.asciidoc[tag=index-alias-desc]
[source,console]
----
-DELETE /twitter/_alias/alias1
+DELETE /my-index-000001/_alias/alias1
----
-// TEST[setup:twitter]
-// TEST[s/^/PUT twitter\/_alias\/alias1\n/]
+// TEST[setup:my_index]
+// TEST[s/^/PUT my-index-000001\/_alias\/alias1\n/]
[[delete-alias-api-request]]
==== {api-request-title}

@@ -8,9 +8,9 @@ Deletes an existing index.
[source,console]
--------------------------------------------------
-DELETE /twitter
+DELETE /my-index-000001
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[delete-index-api-request]]

@@ -8,9 +8,9 @@ Flushes one or more data streams or indices.
[source,console]
--------------------------------------------------
-POST /twitter/_flush
+POST /my-index-000001/_flush
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[flush-api-request]]
@@ -118,9 +118,9 @@ Defaults to `true`.
[source,console]
----
-POST /kimchy/_flush
+POST /my-index-000001/_flush
----
-// TEST[s/^/PUT kimchy\n/]
+// TEST[s/^/PUT my-index-000001\n/]
[[flush-multi-index]]
@@ -128,9 +128,9 @@ POST /kimchy/_flush
[source,console]
----
-POST /kimchy,elasticsearch/_flush
+POST /my-index-000001,my-index-000002/_flush
----
-// TEST[s/^/PUT kimchy\nPUT elasticsearch\n/]
+// TEST[s/^/PUT my-index-000001\nPUT my-index-000002\n/]
[[flush-api-all-ex]]

@@ -10,9 +10,9 @@ indices.
[source,console]
----
-POST /twitter/_forcemerge
+POST /my-index-000001/_forcemerge
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[forcemerge-api-request]]
@@ -139,7 +139,7 @@ NOTE: This parameter does *not* override the
[source,console]
----
-POST /twitter/_forcemerge
+POST /my-index-000001/_forcemerge
----
// TEST[continued]
@@ -149,9 +149,9 @@ POST /twitter/_forcemerge
[source,console]
----
-POST /kimchy,elasticsearch/_forcemerge
+POST /my-index-000001,my-index-000002/_forcemerge
----
-// TEST[s/^/PUT kimchy\nPUT elasticsearch\n/]
+// TEST[s/^/PUT my-index-000001\nPUT my-index-000002\n/]
[[forcemerge-api-all-ex]]
@@ -178,8 +178,8 @@ its shards can be force-merged to a single segment.
--------------------------------------------------
POST /.ds-logs-000001/_forcemerge?max_num_segments=1
--------------------------------------------------
-// TEST[setup:twitter]
-// TEST[s/.ds-logs-000001/twitter/]
+// TEST[setup:my_index]
+// TEST[s/.ds-logs-000001/my-index-000001/]
This can be a good idea because single-segment shards can sometimes use simpler
and more efficient data structures to perform searches.
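
Not part of this commit, but if you want to confirm the effect of a force merge like the one above, the segments API (itself renamed in this diff) reports per-shard segment counts. A minimal sketch, assuming the `my-index-000001` example index:

[source,console]
----
GET /my-index-000001/_segments
----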

@@ -10,10 +10,10 @@ include::{es-repo-dir}/glossary.asciidoc[tag=index-alias-desc]
[source,console]
----
-GET /twitter/_alias/alias1
+GET /my-index-000001/_alias/alias1
----
-// TEST[setup:twitter]
-// TEST[s/^/PUT twitter\/_alias\/alias1\n/]
+// TEST[setup:my_index]
+// TEST[s/^/PUT my-index-000001\/_alias\/alias1\n/]
[[get-alias-api-request]]

@@ -12,9 +12,9 @@ or if an index mapping contains a large number of fields.
[source,console]
----
-GET /twitter/_mapping/field/user
+GET /my-index-000001/_mapping/field/user
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[get-field-mapping-api-request]]
@@ -224,14 +224,14 @@ retrieve mappings for all fields in the targeted data streams or indices.
However, the `<field>` parameter does not support the `_all` value.
For example, the following request retrieves mappings for the `message` field in
-any data stream or index named `twitter` or `kimchy`.
+any data stream or index named `my-index-000001` or `my-index-000002`.
[source,console]
----
-GET /twitter,kimchy/_mapping/field/message
+GET /my-index-000001,my-index-000002/_mapping/field/message
----
-// TEST[setup:twitter]
-// TEST[s/^/PUT kimchy\n/]
+// TEST[setup:my_index]
+// TEST[s/^/PUT my-index-000002\n/]
The following request retrieves mappings for the `message` and `user.id` fields
in any data stream or index in the cluster.
@@ -240,7 +240,7 @@ in any data stream or index in the cluster.
----
GET /_all/_mapping/field/message
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
The following request retrieves mappings for fields with an `id` property in any
data stream or index in the cluster.
@@ -249,4 +249,4 @@ data stream or index in the cluster.
----
GET /_all/_mapping/field/*.id
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]

@@ -9,9 +9,9 @@ returns information about the stream's backing indices.
[source,console]
--------------------------------------------------
-GET /twitter
+GET /my-index-000001
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
NOTE: Before 7.0.0, the 'mappings' definition used to include a type name. Although mappings
in responses no longer contain a type name by default, you can still request the old format

@@ -9,9 +9,9 @@ streams, the API retrieves mappings for the stream's backing indices.
[source,console]
--------------------------------------------------
-GET /twitter/_mapping
+GET /my-index-000001/_mapping
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
NOTE: Before 7.0.0, the 'mappings' definition used to include a type name. Although mappings
in responses no longer contain a type name by default, you can still request the old format
@@ -73,10 +73,9 @@ The following are some examples:
[source,console]
--------------------------------------------------
-GET /twitter,kimchy/_mapping
+GET /my-index-000001,my-index-000002/_mapping
--------------------------------------------------
-// TEST[setup:twitter]
-// TEST[s/^/PUT kimchy\nPUT book\n/]
+// TEST[s/^/PUT my-index-000001\nPUT my-index-000002\n/]
If you want to get mappings of all indices in a cluster, the following
examples are equivalent:
@@ -89,4 +88,4 @@ GET /_all/_mapping
GET /_mapping
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]

@@ -9,9 +9,9 @@ returns setting information for the stream's backing indices.
[source,console]
--------------------------------------------------
-GET /twitter/_settings
+GET /my-index-000001/_settings
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[get-index-settings-api-request]]
@@ -71,14 +71,14 @@ Wildcard expressions are also supported. The following are some examples:
[source,console]
--------------------------------------------------
-GET /twitter,kimchy/_settings
+GET /my-index-000001,my-index-000002/_settings
GET /_all/_settings
-GET /log_2013_*/_settings
+GET /log_2099_*/_settings
--------------------------------------------------
-// TEST[setup:twitter]
-// TEST[s/^/PUT kimchy\nPUT log_2013_01_01\n/]
+// TEST[setup:my_index]
+// TEST[s/^/PUT my-index-000002\nPUT log_2099_01_01\n/]
===== Filtering settings by name
@@ -87,6 +87,6 @@ as follows:
[source,console]
--------------------------------------------------
-GET /log_2013_-*/_settings/index.number_*
+GET /log_2099_-*/_settings/index.number_*
--------------------------------------------------
// TEST[continued]

@@ -8,9 +8,9 @@ Checks if an index exists.
[source,console]
--------------------------------------------------
-HEAD /twitter
+HEAD /my-index-000001
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[indices-exists-api-request]]

@@ -9,10 +9,10 @@ opens any closed backing indices.
[source,console]
--------------------------------------------------
-POST /twitter/_open
+POST /my-index-000001/_open
--------------------------------------------------
-// TEST[setup:twitter]
-// TEST[s/^/POST \/twitter\/_close\n/]
+// TEST[setup:my_index]
+// TEST[s/^/POST \/my-index-000001\/_close\n/]
[[open-index-api-request]]

@@ -11,9 +11,9 @@ indices.
[source,console]
----
-GET /twitter/_recovery
+GET /my-index-000001/_recovery
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[index-recovery-api-request]]

@@ -9,9 +9,9 @@ backing indices.
[source,console]
----
-POST /twitter/_refresh
+POST /my-index-000001/_refresh
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[refresh-api-request]]
@@ -93,9 +93,9 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailab
[source,console]
----
-POST /kimchy,elasticsearch/_refresh
+POST /my-index-000001,my-index-000002/_refresh
----
-// TEST[s/^/PUT kimchy\nPUT elasticsearch\n/]
+// TEST[s/^/PUT my-index-000001\nPUT my-index-000002\n/]
[[refresh-api-all-ex]]

@@ -48,7 +48,7 @@ DELETE /_index_template/foo_data_stream
[source,console]
----
-GET /_resolve/index/twitter*
+GET /_resolve/index/my-index-*
----
[[resolve-index-api-request]]

@@ -13,7 +13,7 @@ index becomes the data stream's write index and its generation is incremented.
[source,console]
----
-POST /alias1/_rollover/twitter
+POST /alias1/_rollover/my-index-000002
{
"conditions": {
"max_age": "7d",
@@ -193,8 +193,8 @@ POST /logs_write/_rollover <2>
}
}
--------------------------------------------------
-// TEST[setup:huge_twitter]
-// TEST[s/# Add > 1000 documents to logs-000001/POST _reindex?refresh\n{"source":{"index":"twitter"},"dest":{"index":"logs-000001"}}/]
+// TEST[setup:my_index_huge]
+// TEST[s/# Add > 1000 documents to logs-000001/POST _reindex?refresh\n{"source":{"index":"my-index-000001"},"dest":{"index":"logs-000001"}}/]
<1> Creates an index called `logs-000001` with the alias `logs_write`.
<2> If the index pointed to by `logs_write` was created 7 or more days ago, or
contains 1,000 or more documents, or has an index size at least around 5GB, then the `logs-000002` index is created
@@ -235,7 +235,6 @@ PUT _index_template/template
"data_stream": { }
}
-----------------------------------
-// TEST
[source,console]
--------------------------------------------------
@@ -253,8 +252,8 @@ POST /my-data-stream/_rollover <2>
}
--------------------------------------------------
// TEST[continued]
-// TEST[setup:huge_twitter]
-// TEST[s/# Add > 1000 documents to my-data-stream/POST _reindex?refresh\n{"source":{"index":"twitter"},"dest":{"index":"my-data-stream","op_type":"create"},"script":{"source":"ctx._source.put('@timestamp',ctx._source.remove('date'))"}}/]
+// TEST[setup:my_index_huge]
+// TEST[s/# Add > 1000 documents to my-data-stream/POST _reindex?refresh\n{ "source": { "index": "my-index-000001" }, "dest": { "index": "my-data-stream", "op_type": "create" } }/]
<1> Creates a data stream called `my-data-stream` with one initial backing index
named `my-data-stream-000001`.
<2> This request creates a new backing index, `my-data-stream-000002`, and adds

@@ -10,9 +10,9 @@ the stream's backing indices.
[source,console]
----
-GET /twitter/_segments
+GET /my-index-000001/_segments
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[index-segments-api-request]]

@@ -11,9 +11,9 @@ information for the stream's backing indices.
[source,console]
----
-GET /twitter/_shard_stores
+GET /my-index-000001/_shard_stores
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[index-shard-stores-api-request]]

@@ -9,9 +9,9 @@ Shrinks an existing index into a new index with fewer primary shards.
[source,console]
----
-POST /twitter/_shrink/shrunk-twitter-index
+POST /my-index-000001/_shrink/shrunk-my-index-000001
----
-// TEST[s/^/PUT twitter\n{"settings":{"index.number_of_shards":2,"blocks.write":true}}\n/]
+// TEST[s/^/PUT my-index-000001\n{"settings":{"index.number_of_shards":2,"blocks.write":true}}\n/]
[[shrink-index-api-request]]

@@ -8,14 +8,14 @@ Splits an existing index into a new index with more primary shards.
[source,console]
----
-POST /twitter/_split/split-twitter-index
+POST /my-index-000001/_split/split-my-index-000001
{
"settings": {
"index.number_of_shards": 2
}
}
----
-// TEST[s/^/PUT twitter\n{"settings":{"blocks.write":true}}\n/]
+// TEST[s/^/PUT my-index-000001\n{"settings":{"blocks.write":true}}\n/]
[[split-index-api-request]]

@@ -9,9 +9,9 @@ statistics for the stream's backing indices.
[source,console]
----
-GET /twitter/_stats
+GET /my-index-000001/_stats
----
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[index-stats-api-request]]
@@ -112,7 +112,7 @@ GET /index1,index2/_stats
--------------------------------------------------
GET /_stats
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[index-stats-api-specific-stats-ex]]
@@ -126,7 +126,7 @@ for all indices.
--------------------------------------------------
GET /_stats/merge,refresh
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[index-stats-api-specific-groups-ex]]
@@ -140,4 +140,4 @@ for the `group1` and `group2` search groups.
--------------------------------------------------
GET /_stats/search?groups=group1,group2
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]

@@ -11,14 +11,14 @@ default.
[source,console]
--------------------------------------------------
-PUT /twitter/_settings
+PUT /my-index-000001/_settings
{
"index" : {
"number_of_replicas" : 2
}
}
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
[[update-index-settings-api-request]]
@@ -77,14 +77,14 @@ To revert a setting to the default value, use `null`. For example:
[source,console]
--------------------------------------------------
-PUT /twitter/_settings
+PUT /my-index-000001/_settings
{
"index" : {
"refresh_interval" : null
}
}
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
The list of per-index settings which can be updated dynamically on live
indices can be found in <<index-modules>>.
@@ -101,14 +101,14 @@ use:
[source,console]
--------------------------------------------------
-PUT /twitter/_settings
+PUT /my-index-000001/_settings
{
"index" : {
"refresh_interval" : "-1"
}
}
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]
(Another optimization option is to start the index without any replicas,
and only later adding them, but that really depends on the use case).
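
That replica aside is not illustrated in the docs hunk itself. A minimal sketch of the idea, using a hypothetical `my-index-000003` that appears nowhere in this commit: create the index with zero replicas for the initial load, then add a replica once indexing is done.

[source,console]
----
PUT /my-index-000003
{
  "settings": {
    "index.number_of_replicas": 0
  }
}

PUT /my-index-000003/_settings
{
  "index": {
    "number_of_replicas": 1
  }
}
----
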
@@ -118,7 +118,7 @@ the defaults for example):
[source,console]
--------------------------------------------------
-PUT /twitter/_settings
+PUT /my-index-000001/_settings
{
"index" : {
"refresh_interval" : "1s"
@@ -131,7 +131,7 @@ And, a force merge should be called:
[source,console]
--------------------------------------------------
-POST /twitter/_forcemerge?max_num_segments=5
+POST /my-index-000001/_forcemerge?max_num_segments=5
--------------------------------------------------
// TEST[continued]
@@ -163,13 +163,13 @@ new data stream and reindex your data into it. See
====
For example,
-the following commands add the `content` analyzer to the `twitter` index:
+the following commands add the `content` analyzer to the `my-index-000001` index:
[source,console]
--------------------------------------------------
-POST /twitter/_close
+POST /my-index-000001/_close
-PUT /twitter/_settings
+PUT /my-index-000001/_settings
{
"analysis" : {
"analyzer":{
@@ -181,6 +181,6 @@ PUT /twitter/_settings
}
}
-POST /twitter/_open
+POST /my-index-000001/_open
--------------------------------------------------
-// TEST[setup:twitter]
+// TEST[setup:my_index]