Include size of snapshot in snapshot metadata #18543, bwc clean up (#30890)

Vladimir Dolzhenko 2018-05-26 21:20:44 +02:00 committed by GitHub
parent 0698dd017c
commit b55b079a90
4 changed files with 8 additions and 74 deletions


@@ -597,12 +597,7 @@ The output looks similar to the following:
"size_in_bytes": 4704
},
"start_time_in_millis": 1526280280355,
"time_in_millis": 358,
"number_of_files": 8,
"processed_files": 8,
"total_size_in_bytes": 4704,
"processed_size_in_bytes": 4704
"time_in_millis": 358
}
}
]
@@ -616,9 +611,6 @@ the `stats` object contains a `total` section for all the files that are referen
for those files that actually needed to be copied over as part of the incremental snapshotting. In case of a snapshot that's still
in progress, there's also a `processed` section that contains information about the files that are in the process of being copied.
_Note_: Properties `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` are used for
backward compatibility reasons with older 5.x and 6.x versions. These fields will be removed in Elasticsearch v7.0.0.
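To make the post-cleanup response shape concrete, here is a minimal client-side sketch, assuming Jackson is on the classpath; the sample values mirror the documented output above, and the example is illustrative only, not part of this commit or of any Elasticsearch client API:
[source,java]
----
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class SnapshotStatsParseExample {
    public static void main(String[] args) throws Exception {
        // Sample stats object in the post-cleanup shape: only the `incremental` and
        // `total` sections (plus `processed` while a snapshot is running) and timings.
        String stats = "{"
            + "\"incremental\": {\"file_count\": 8, \"size_in_bytes\": 4704},"
            + "\"total\": {\"file_count\": 8, \"size_in_bytes\": 4704},"
            + "\"start_time_in_millis\": 1526280280355,"
            + "\"time_in_millis\": 358"
            + "}";
        JsonNode node = new ObjectMapper().readTree(stats);
        // The legacy number_of_files / processed_files / total_size_in_bytes /
        // processed_size_in_bytes keys are gone and must not be relied on.
        System.out.println("incremental file_count: " + node.path("incremental").path("file_count").asLong());
        System.out.println("total size_in_bytes:    " + node.path("total").path("size_in_bytes").asLong());
    }
}
----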
Multiple ids are also supported:
[source,sh]


@@ -11,9 +11,6 @@ setup:
---
"Get snapshot status":
- skip:
version: " - 6.99.99"
reason: "backporting in progress: https://github.com/elastic/elasticsearch/pull/29602"
- do:
indices.create:
index: test_index
@@ -39,38 +36,10 @@ setup:
- gt: { snapshots.0.stats.incremental.file_count: 0 }
- gt: { snapshots.0.stats.incremental.size_in_bytes: 0 }
- gt: { snapshots.0.stats.total.file_count: 0 }
- gt: { snapshots.0.stats.total.size_in_bytes: 0 }
- is_true: snapshots.0.stats.start_time_in_millis
- is_true: snapshots.0.stats.time_in_millis
---
"Get snapshot status with BWC fields":
- do:
indices.create:
index: test_index
body:
settings:
number_of_shards: 1
number_of_replicas: 0
- do:
snapshot.create:
repository: test_repo_status_1
snapshot: test_snapshot_bwc
wait_for_completion: true
- do:
snapshot.status:
repository: test_repo_status_1
snapshot: test_snapshot_bwc
- is_true: snapshots
- match: { snapshots.0.snapshot: test_snapshot_bwc }
- match: { snapshots.0.state: SUCCESS }
- gt: { snapshots.0.stats.number_of_files: 0 }
- gt: { snapshots.0.stats.processed_files: 0 }
- gt: { snapshots.0.stats.total_size_in_bytes: 0 }
- gt: { snapshots.0.stats.processed_size_in_bytes: 0 }
---
"Get missing snapshot status throws an exception":


@@ -132,7 +132,7 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
out.writeVLong(incrementalSize);
out.writeVLong(processedSize);
if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
out.writeVInt(totalFileCount);
out.writeVLong(totalSize);
}
@@ -149,7 +149,7 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
incrementalSize = in.readVLong();
processedSize = in.readVLong();
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
totalFileCount = in.readVInt();
totalSize = in.readVLong();
} else {
@@ -172,15 +172,6 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
static final String START_TIME_IN_MILLIS = "start_time_in_millis";
static final String TIME_IN_MILLIS = "time_in_millis";
static final String TIME = "time";
// BWC
static final String NUMBER_OF_FILES = "number_of_files";
static final String PROCESSED_FILES = "processed_files";
static final String TOTAL_SIZE = "total_size";
static final String TOTAL_SIZE_IN_BYTES = "total_size_in_bytes";
static final String PROCESSED_SIZE_IN_BYTES = "processed_size_in_bytes";
static final String PROCESSED_SIZE = "processed_size";
}
@Override
@@ -211,13 +202,7 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
builder.field(Fields.START_TIME_IN_MILLIS, getStartTime())
.humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
// BWC part
return builder.field(Fields.NUMBER_OF_FILES, getIncrementalFileCount())
.field(Fields.PROCESSED_FILES, getProcessedFileCount())
.humanReadableField(Fields.TOTAL_SIZE_IN_BYTES, Fields.TOTAL_SIZE, new ByteSizeValue(getIncrementalSize()))
.humanReadableField(Fields.PROCESSED_SIZE_IN_BYTES, Fields.PROCESSED_SIZE, new ByteSizeValue(getProcessedSize()))
// BWC part ends
.endObject();
return builder.endObject();
}
void add(SnapshotStats stats) {
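The serialization hunks above move the gate for the new total fields from `V_7_0_0_alpha1` down to `V_6_4_0`, so nodes from 6.4.0 onwards write and read them. Below is a minimal sketch of that version-gated wire pattern, using plain `java.io` streams and hypothetical stand-in types (`WireVersion`, `StatsWireFormat`) rather than Elasticsearch's `StreamOutput`/`StreamInput`, and fixed-width longs where the real code uses variable-length vint/vlong encodings:
[source,java]
----
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical stand-in for org.elasticsearch.Version; illustration only.
final class WireVersion {
    static final WireVersion V_6_4_0 = new WireVersion(6_04_00);
    final int id;
    WireVersion(int id) { this.id = id; }
    boolean onOrAfter(WireVersion other) { return id >= other.id; }
}

// Hypothetical stand-in showing the version-gated pattern from the diff above.
public class StatsWireFormat {
    long totalFileCount = -1;
    long totalSize = -1;

    void writeTo(DataOutputStream out, WireVersion destinationVersion) throws IOException {
        // ... older fields are written unconditionally before this point ...
        if (destinationVersion.onOrAfter(WireVersion.V_6_4_0)) {
            // Only peers new enough to understand the total fields receive them.
            out.writeLong(totalFileCount);
            out.writeLong(totalSize);
        }
    }

    void readFrom(DataInputStream in, WireVersion sourceVersion) throws IOException {
        // ... older fields are read unconditionally before this point ...
        if (sourceVersion.onOrAfter(WireVersion.V_6_4_0)) {
            totalFileCount = in.readLong();
            totalSize = in.readLong();
        } else {
            // An older peer never sent the fields; keep a recognizable default.
            totalFileCount = -1;
            totalSize = -1;
        }
    }

    public static void main(String[] args) throws IOException {
        StatsWireFormat sent = new StatsWireFormat();
        sent.totalFileCount = 8;
        sent.totalSize = 4704;

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        sent.writeTo(new DataOutputStream(bytes), WireVersion.V_6_4_0);

        StatsWireFormat received = new StatsWireFormat();
        received.readFrom(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())), WireVersion.V_6_4_0);
        System.out.println(received.totalFileCount + " files, " + received.totalSize + " bytes");
    }
}
----
A peer below the gate version simply skips the guarded block on both sides, which is why both the write and the read path must check the same version constant.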


@@ -100,11 +100,7 @@ public class SnapshotStatusTests extends ESTestCase {
" \"size_in_bytes\" : 0\n" +
" },\n" +
" \"start_time_in_millis\" : 0,\n" +
" \"time_in_millis\" : 0,\n" +
" \"number_of_files\" : 0,\n" +
" \"processed_files\" : 0,\n" +
" \"total_size_in_bytes\" : 0,\n" +
" \"processed_size_in_bytes\" : 0\n" +
" \"time_in_millis\" : 0\n" +
" },\n" +
" \"indices\" : {\n" +
" \"" + indexName + "\" : {\n" +
@@ -126,11 +122,7 @@ public class SnapshotStatusTests extends ESTestCase {
" \"size_in_bytes\" : 0\n" +
" },\n" +
" \"start_time_in_millis\" : 0,\n" +
" \"time_in_millis\" : 0,\n" +
" \"number_of_files\" : 0,\n" +
" \"processed_files\" : 0,\n" +
" \"total_size_in_bytes\" : 0,\n" +
" \"processed_size_in_bytes\" : 0\n" +
" \"time_in_millis\" : 0\n" +
" },\n" +
" \"shards\" : {\n" +
" \"" + shardId + "\" : {\n" +
@@ -145,11 +137,7 @@ public class SnapshotStatusTests extends ESTestCase {
" \"size_in_bytes\" : 0\n" +
" },\n" +
" \"start_time_in_millis\" : 0,\n" +
" \"time_in_millis\" : 0,\n" +
" \"number_of_files\" : 0,\n" +
" \"processed_files\" : 0,\n" +
" \"total_size_in_bytes\" : 0,\n" +
" \"processed_size_in_bytes\" : 0\n" +
" \"time_in_millis\" : 0\n" +
" }\n" +
" }\n" +
" }\n" +