FEATURE: Support backup uploads/downloads directly to/from S3.
commit c29a4dddc1 (parent 5039a6c3f1)
@@ -1,9 +1,15 @@
 import { ajax } from "discourse/lib/ajax";
+import computed from "ember-addons/ember-computed-decorators";
 
 export default Ember.Controller.extend({
   adminBackups: Ember.inject.controller(),
   status: Ember.computed.alias("adminBackups.model"),
 
+  @computed
+  localBackupStorage() {
+    return this.siteSettings.backup_location === "local";
+  },
+
   uploadLabel: function() {
     return I18n.t("admin.backups.upload.label");
   }.property(),
@@ -1,5 +1,4 @@
 import { ajax } from "discourse/lib/ajax";
-import PreloadStore from "preload-store";
 
 const Backup = Discourse.Model.extend({
   destroy() {
@@ -16,9 +15,9 @@ const Backup = Discourse.Model.extend({
 
 Backup.reopenClass({
   find() {
-    return PreloadStore.getAndRemove("backups", () =>
-      ajax("/admin/backups.json")
-    ).then(backups => backups.map(backup => Backup.create(backup)));
+    return ajax("/admin/backups.json").then(backups =>
+      backups.map(backup => Backup.create(backup))
+    );
   },
 
   start(withUploads) {
@@ -151,6 +151,15 @@ export default Discourse.Route.extend({
           message: message
         })
       );
+    },
+
+    remoteUploadSuccess() {
+      Backup.find().then(backups => {
+        this.controllerFor("adminBackupsIndex").set(
+          "model",
+          backups.map(backup => Backup.create(backup))
+        );
+      });
     }
   }
 });
@@ -1,5 +1,10 @@
 <div class="backup-options">
-  {{resumable-upload target="/admin/backups/upload" success="uploadSuccess" error="uploadError" uploadText=uploadLabel title="admin.backups.upload.title"}}
+  {{#if localBackupStorage}}
+    {{resumable-upload target="/admin/backups/upload" success="uploadSuccess" error="uploadError" uploadText=uploadLabel title="admin.backups.upload.title"}}
+  {{else}}
+    {{backup-uploader done="remoteUploadSuccess"}}
+  {{/if}}
+
   {{#if site.isReadOnly}}
     {{d-button icon="eye" action="toggleReadOnlyMode" disabled=status.isOperationRunning title="admin.backups.read_only.disable.title" label="admin.backups.read_only.disable.label"}}
   {{else}}
@@ -1,7 +1,7 @@
 <label class="btn {{if addDisabled 'disabled'}}">
   {{d-icon "upload"}}
   {{i18n 'admin.watched_words.form.upload'}}
-  <input disabled={{addDisabled}} type="file" accept="text/plain,text/csv" style="visibility: hidden; position: absolute;" />
+  <input class="hidden-upload-field" disabled={{addDisabled}} type="file" accept="text/plain,text/csv" />
 </label>
 <br/>
 <span class="instructions">One word per line</span>
@@ -0,0 +1,51 @@
+import { ajax } from "discourse/lib/ajax";
+import computed from "ember-addons/ember-computed-decorators";
+import UploadMixin from "discourse/mixins/upload";
+
+export default Em.Component.extend(UploadMixin, {
+  tagName: "span",
+
+  @computed("uploading", "uploadProgress")
+  uploadButtonText(uploading, progress) {
+    return uploading
+      ? I18n.t("admin.backups.upload.uploading_progress", { progress })
+      : I18n.t("admin.backups.upload.label");
+  },
+
+  validateUploadedFilesOptions() {
+    return { skipValidation: true };
+  },
+
+  uploadDone() {
+    this.sendAction("done");
+  },
+
+  calculateUploadUrl() {
+    return "";
+  },
+
+  uploadOptions() {
+    return {
+      type: "PUT",
+      dataType: "xml",
+      autoUpload: false
+    };
+  },
+
+  _init: function() {
+    const $upload = this.$();
+
+    $upload.on("fileuploadadd", (e, data) => {
+      ajax("/admin/backups/upload_url", {
+        data: { filename: data.files[0].name }
+      }).then(result => {
+        if (!result.success) {
+          bootbox.alert(result.message);
+        } else {
+          data.url = result.url;
+          data.submit();
+        }
+      });
+    });
+  }.on("didInsertElement")
+});
@@ -225,6 +225,7 @@ export function validateUploadedFiles(files, opts) {
 }
 
 export function validateUploadedFile(file, opts) {
+  if (opts.skipValidation) return true;
   if (!authorizesOneOrMoreExtensions()) return false;
 
   opts = opts || {};
@@ -2,6 +2,7 @@ import {
   displayErrorForUpload,
   validateUploadedFiles
 } from "discourse/lib/utilities";
+import getUrl from "discourse-common/lib/get-url";
 
 export default Em.Mixin.create({
   uploading: false,
@@ -15,13 +16,25 @@ export default Em.Mixin.create({
     return {};
   },
 
+  calculateUploadUrl() {
+    return (
+      getUrl(this.getWithDefault("uploadUrl", "/uploads")) +
+      ".json?client_id=" +
+      this.messageBus.clientId +
+      "&authenticity_token=" +
+      encodeURIComponent(Discourse.Session.currentProp("csrfToken"))
+    );
+  },
+
+  uploadOptions() {
+    return {};
+  },
+
   _initialize: function() {
-    const $upload = this.$(),
-      csrf = Discourse.Session.currentProp("csrfToken"),
-      uploadUrl = Discourse.getURL(
-        this.getWithDefault("uploadUrl", "/uploads")
-      ),
-      reset = () => this.setProperties({ uploading: false, uploadProgress: 0 });
+    const $upload = this.$();
+    const reset = () =>
+      this.setProperties({ uploading: false, uploadProgress: 0 });
+    const maxFiles = this.getWithDefault("maxFiles", 10);
 
     $upload.on("fileuploaddone", (e, data) => {
       let upload = data.result;
@@ -29,20 +42,21 @@ export default Em.Mixin.create({
       reset();
     });
 
-    $upload.fileupload({
-      url:
-        uploadUrl +
-        ".json?client_id=" +
-        this.messageBus.clientId +
-        "&authenticity_token=" +
-        encodeURIComponent(csrf),
-      dataType: "json",
-      dropZone: $upload,
-      pasteZone: $upload
-    });
+    $upload.fileupload(
+      _.merge(
+        {
+          url: this.calculateUploadUrl(),
+          dataType: "json",
+          replaceFileInput: false,
+          dropZone: $upload,
+          pasteZone: $upload
+        },
+        this.uploadOptions()
+      )
+    );
 
     $upload.on("fileuploaddrop", (e, data) => {
-      if (data.files.length > 10) {
+      if (data.files.length > maxFiles) {
         bootbox.alert(I18n.t("post.errors.too_many_dragged_and_dropped_files"));
         return false;
       } else {
@@ -56,7 +70,8 @@ export default Em.Mixin.create({
         this.validateUploadedFilesOptions()
       );
       const isValid = validateUploadedFiles(data.files, opts);
-      let form = { type: this.get("type") };
+      const type = this.get("type");
+      let form = type ? { type } : {};
       if (this.get("data")) {
         form = $.extend(form, this.get("data"));
       }
@@ -1,6 +1,6 @@
 <label class="btn" disabled={{uploading}} title="{{i18n 'user.change_avatar.upload_title'}}">
   {{d-icon "picture-o"}} {{uploadButtonText}}
-  <input disabled={{uploading}} type="file" accept="image/*" style="visibility: hidden; position: absolute;" />
+  <input class="hidden-upload-field" disabled={{uploading}} type="file" accept="image/*" />
 </label>
 {{#if uploading}}
   <span>{{i18n 'upload_selector.uploading'}} {{uploadProgress}}%</span>
@@ -0,0 +1,4 @@
+<label class="btn" disabled={{uploading}} title="{{i18n 'admin.backups.upload.title'}}">
+  {{d-icon "upload"}} {{uploadButtonText}}
+  <input class="hidden-upload-field" disabled={{uploading}} type="file" accept=".gz" />
+</label>
@@ -1,6 +1,6 @@
 <label class="btn" disabled={{uploadButtonDisabled}}>
   {{d-icon "upload"}} {{uploadButtonText}}
-  <input disabled={{uploading}} type="file" accept=".csv" style="visibility: hidden; position: absolute;" />
+  <input class="hidden-upload-field" disabled={{uploading}} type="file" accept=".csv" />
 </label>
 {{#if uploading}}
   <span>{{i18n 'upload_selector.uploading'}} {{uploadProgress}}%</span>
@@ -2,5 +2,5 @@
 <label class="btn btn-primary {{if addDisabled 'disabled'}}">
   {{d-icon "plus"}}
   {{i18n 'admin.emoji.add'}}
-  <input disabled={{addDisabled}} type="file" accept=".png,.gif" style="visibility: hidden; position: absolute;" />
+  <input class="hidden-upload-field" disabled={{addDisabled}} type="file" accept=".png,.gif" />
 </label>
@@ -2,7 +2,7 @@
 <div class="image-upload-controls">
   <label class="btn pad-left no-text {{if uploading 'disabled'}}">
     {{d-icon "picture-o"}}
-    <input disabled={{uploading}} type="file" accept="image/*" style="visibility: hidden; position: absolute;" />
+    <input class="hidden-upload-field" disabled={{uploading}} type="file" accept="image/*" />
   </label>
   {{#if backgroundStyle}}
     <button {{action "trash"}} class="btn btn-danger pad-left no-text">{{d-icon "trash-o"}}</button>
@@ -1,6 +1,6 @@
 <label class="btn" disabled={{uploading}} title="{{i18n "admin.site_settings.uploaded_image_list.upload.title"}}">
   {{d-icon "picture-o"}} {{uploadButtonText}}
-  <input disabled={{uploading}} type="file" accept="image/*" multiple style="visibility: hidden; position: absolute;" />
+  <input class="hidden-upload-field" disabled={{uploading}} type="file" accept="image/*" multiple />
 </label>
 {{#if uploading}}
   <span>{{i18n 'upload_selector.uploading'}} {{uploadProgress}}%</span>
@@ -10,5 +10,5 @@
     {{d-icon "picture-o"}}
   {{/if}}
 
-  <input disabled={{uploading}} type="file" accept="image/*" style="visibility: hidden; position: absolute;" />
+  <input class="wizard-hidden-upload-field" disabled={{uploading}} type="file" accept="image/*" />
 </label>
@@ -14,3 +14,8 @@
     background-size: contain;
   }
 }
+
+.hidden-upload-field {
+  visibility: hidden;
+  position: absolute;
+}
@@ -328,6 +328,11 @@ body.wizard {
   }
 }
 
+.wizard-hidden-upload-field {
+  visibility: hidden;
+  position: absolute;
+}
+
 .wizard-step-footer {
   display: flex;
   flex-direction: row;
@@ -1,20 +1,26 @@
 require "backup_restore/backup_restore"
+require "backup_restore/backup_store"
 
 class Admin::BackupsController < Admin::AdminController
 
   before_action :ensure_backups_enabled
   skip_before_action :check_xhr, only: [:index, :show, :logs, :check_backup_chunk, :upload_backup_chunk]
 
   def index
     respond_to do |format|
       format.html do
-        store_preloaded("backups", MultiJson.dump(serialize_data(Backup.all, BackupSerializer)))
         store_preloaded("operations_status", MultiJson.dump(BackupRestore.operations_status))
         store_preloaded("logs", MultiJson.dump(BackupRestore.logs))
         render "default/empty"
       end
 
       format.json do
-        render_serialized(Backup.all, BackupSerializer)
+        store = BackupRestore::BackupStore.create
+
+        begin
+          render_serialized(store.files, BackupFileSerializer)
+        rescue BackupRestore::BackupStore::StorageError => e
+          render_json_error(e)
+        end
       end
     end
   end
@@ -46,8 +52,11 @@ class Admin::BackupsController < Admin::AdminController
   end
 
   def email
-    if backup = Backup[params.fetch(:id)]
-      Jobs.enqueue(:download_backup_email,
+    store = BackupRestore::BackupStore.create
+
+    if store.file(params.fetch(:id)).present?
+      Jobs.enqueue(
+        :download_backup_email,
         user_id: current_user.id,
         backup_file_path: url_for(controller: 'backups', action: 'show')
       )
@@ -59,28 +68,34 @@ class Admin::BackupsController < Admin::AdminController
   end
 
   def show
 
     if !EmailBackupToken.compare(current_user.id, params.fetch(:token))
       @error = I18n.t('download_backup_mailer.no_token')
+      return render template: 'admin/backups/show.html.erb', layout: 'no_ember', status: 422
     end
-    if !@error && backup = Backup[params.fetch(:id)]
+
+    store = BackupRestore::BackupStore.create
+
+    if backup = store.file(params.fetch(:id), include_download_source: true)
       EmailBackupToken.del(current_user.id)
       StaffActionLogger.new(current_user).log_backup_download(backup)
-      headers['Content-Length'] = File.size(backup.path).to_s
-      send_file backup.path
-    else
-      if @error
-        render template: 'admin/backups/show.html.erb', layout: 'no_ember', status: 422
+
+      if store.remote?
+        redirect_to backup.source
       else
-        render body: nil, status: 404
+        headers['Content-Length'] = File.size(backup.source).to_s
+        send_file backup.source
       end
+    else
+      render body: nil, status: 404
     end
   end
 
   def destroy
-    if backup = Backup[params.fetch(:id)]
+    store = BackupRestore::BackupStore.create
+
+    if backup = store.file(params.fetch(:id))
       StaffActionLogger.new(current_user).log_backup_destroy(backup)
-      backup.remove
+      store.delete_file(backup.filename)
       render body: nil
     else
       render body: nil, status: 404
@@ -131,13 +146,13 @@ class Admin::BackupsController < Admin::AdminController
   end
 
   def check_backup_chunk
     identifier = params.fetch(:resumableIdentifier)
     filename = params.fetch(:resumableFilename)
     chunk_number = params.fetch(:resumableChunkNumber)
     current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i
 
     # path to chunk file
-    chunk = Backup.chunk_path(identifier, filename, chunk_number)
+    chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
     # check chunk upload status
     status = HandleChunkUpload.check_chunk(chunk, current_chunk_size: current_chunk_size)
 
@@ -145,21 +160,21 @@ class Admin::BackupsController < Admin::AdminController
   end
 
   def upload_backup_chunk
     filename = params.fetch(:resumableFilename)
     total_size = params.fetch(:resumableTotalSize).to_i
 
-    return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless /\.(tar\.gz|t?gz)$/i =~ filename
+    return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename)
     return render status: 415, plain: I18n.t("backup.not_enough_space_on_disk") unless has_enough_space_on_disk?(total_size)
-    return render status: 415, plain: I18n.t("backup.invalid_filename") unless !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename)
+    return render status: 415, plain: I18n.t("backup.invalid_filename") unless valid_filename?(filename)
 
     file = params.fetch(:file)
     identifier = params.fetch(:resumableIdentifier)
     chunk_number = params.fetch(:resumableChunkNumber).to_i
     chunk_size = params.fetch(:resumableChunkSize).to_i
     current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i
 
     # path to chunk file
-    chunk = Backup.chunk_path(identifier, filename, chunk_number)
+    chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
     # upload chunk
     HandleChunkUpload.upload_chunk(chunk, file: file)
 
@@ -173,6 +188,24 @@ class Admin::BackupsController < Admin::AdminController
     render body: nil
   end
 
+  def create_upload_url
+    params.require(:filename)
+    filename = params.fetch(:filename)
+
+    return render_error("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename)
+    return render_error("backup.invalid_filename") unless valid_filename?(filename)
+
+    store = BackupRestore::BackupStore.create
+
+    begin
+      upload_url = store.generate_upload_url(filename)
+    rescue BackupRestore::BackupStore::BackupFileExists
+      return render_error("backup.file_exists")
+    end
+
+    render json: success_json.merge(url: upload_url)
+  end
+
   private
 
   def has_enough_space_on_disk?(size)
@@ -183,4 +216,15 @@ class Admin::BackupsController < Admin::AdminController
     raise Discourse::InvalidAccess.new unless SiteSetting.enable_backups?
   end
 
+  def valid_extension?(filename)
+    /\.(tar\.gz|t?gz)$/i =~ filename
+  end
+
+  def valid_filename?(filename)
+    !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename)
+  end
+
+  def render_error(message_key)
+    render json: failed_json.merge(message: I18n.t(message_key))
+  end
 end
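Taken together, the route and controller changes in this file give admin clients a two-step, direct-to-S3 upload: ask `/admin/backups/upload_url` for a presigned URL, then PUT the archive straight to it. A rough Ruby sketch of that handshake, assuming a reachable site and that admin API credentials are accepted as query parameters (the host, key, and filename below are placeholders):

    require "net/http"
    require "json"
    require "uri"

    # Step 1: ask the new endpoint for a presigned upload URL.
    uri = URI("https://example.com/admin/backups/upload_url.json")
    uri.query = URI.encode_www_form(filename: "my-site-backup.tar.gz",
                                    api_key: "...", api_username: "admin")
    result = JSON.parse(Net::HTTP.get(uri))

    if result["success"]
      # Step 2: PUT the archive body directly to S3; the URL expires, so upload promptly.
      url = URI(result["url"])
      Net::HTTP.start(url.host, url.port, use_ssl: url.scheme == "https") do |http|
        request = Net::HTTP::Put.new(url)
        request.body = File.binread("my-site-backup.tar.gz")
        http.request(request)
      end
    end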
@@ -27,8 +27,12 @@ class Admin::DashboardNextController < Admin::AdminController
   private
 
   def last_backup_taken_at
-    if last_backup = Backup.all.first
-      File.ctime(last_backup.path).utc
+    store = BackupRestore::BackupStore.create
+
+    begin
+      store.latest_file&.last_modified
+    rescue BackupRestore::BackupStore::StorageError
+      nil
     end
   end
 end
@@ -1,3 +1,6 @@
+require_dependency "backup_restore/local_backup_store"
+require_dependency "backup_restore/backup_store"
+
 module Jobs
 
   class BackupChunksMerger < Jobs::Base
@@ -12,16 +15,24 @@ module Jobs
       raise Discourse::InvalidParameters.new(:identifier) if identifier.blank?
       raise Discourse::InvalidParameters.new(:chunks) if chunks <= 0
 
-      backup_path = "#{Backup.base_directory}/#{filename}"
+      backup_path = "#{BackupRestore::LocalBackupStore.base_directory}/#{filename}"
       tmp_backup_path = "#{backup_path}.tmp"
       # path to tmp directory
-      tmp_directory = File.dirname(Backup.chunk_path(identifier, filename, 0))
+      tmp_directory = File.dirname(BackupRestore::LocalBackupStore.chunk_path(identifier, filename, 0))
 
       # merge all chunks
-      HandleChunkUpload.merge_chunks(chunks, upload_path: backup_path, tmp_upload_path: tmp_backup_path, model: Backup, identifier: identifier, filename: filename, tmp_directory: tmp_directory)
+      HandleChunkUpload.merge_chunks(
+        chunks,
+        upload_path: backup_path,
+        tmp_upload_path: tmp_backup_path,
+        identifier: identifier,
+        filename: filename,
+        tmp_directory: tmp_directory
+      )
 
       # push an updated list to the clients
-      data = ActiveModel::ArraySerializer.new(Backup.all, each_serializer: BackupSerializer).as_json
+      store = BackupRestore::BackupStore.create
+      data = ActiveModel::ArraySerializer.new(store.files, each_serializer: BackupFileSerializer).as_json
       MessageBus.publish("/admin/backups", data, user_ids: User.staff.pluck(:id))
     end
 
@@ -7,8 +7,9 @@ module Jobs
     def execute(args)
       return unless SiteSetting.enable_backups? && SiteSetting.automatic_backups_enabled?
 
-      if latest_backup = Backup.all[0]
-        date = File.ctime(latest_backup.path).getutc.to_date
+      store = BackupRestore::BackupStore.create
+
+      if latest_backup = store.latest_file
+        date = latest_backup.last_modified.to_date
         return if (date + SiteSetting.backup_frequency.days) > Time.now.utc.to_date
       end
@@ -209,7 +209,7 @@ class AdminDashboardData
     bad_keys = (SiteSetting.s3_access_key_id.blank? || SiteSetting.s3_secret_access_key.blank?) && !SiteSetting.s3_use_iam_profile
 
     return I18n.t('dashboard.s3_config_warning') if SiteSetting.enable_s3_uploads && (bad_keys || SiteSetting.s3_upload_bucket.blank?)
-    return I18n.t('dashboard.s3_backup_config_warning') if SiteSetting.enable_s3_backups && (bad_keys || SiteSetting.s3_backup_bucket.blank?)
+    return I18n.t('dashboard.s3_backup_config_warning') if SiteSetting.backup_location == BackupLocationSiteSetting::S3 && (bad_keys || SiteSetting.s3_backup_bucket.blank?)
   end
   nil
 end
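Note the dashboard warning now keys off the enum rather than the removed boolean. A minimal sketch of the new gating pattern, as used above:

    # true only when the admin picked "s3" for the backup_location site setting
    s3_backups_enabled = SiteSetting.backup_location == BackupLocationSiteSetting::S3
    local_backups      = SiteSetting.backup_location == BackupLocationSiteSetting::LOCAL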
@@ -1,92 +0,0 @@
-require 'disk_space'
-
-class Backup
-  include ActiveModel::SerializerSupport
-
-  attr_reader :filename
-  attr_accessor :size, :path, :link
-
-  def initialize(filename)
-    @filename = filename
-  end
-
-  def self.all
-    Dir.glob(File.join(Backup.base_directory, "*.{gz,tgz}"))
-      .sort_by { |file| File.mtime(file) }
-      .reverse
-      .map { |backup| Backup.create_from_filename(File.basename(backup)) }
-  end
-
-  def self.[](filename)
-    path = File.join(Backup.base_directory, filename)
-    if File.exists?(path)
-      Backup.create_from_filename(filename)
-    else
-      nil
-    end
-  end
-
-  def remove
-    File.delete(@path) if File.exists?(path)
-    after_remove_hook
-  end
-
-  def after_create_hook
-    upload_to_s3 if SiteSetting.enable_s3_backups?
-    DiscourseEvent.trigger(:backup_created)
-  end
-
-  def after_remove_hook
-    remove_from_s3 if SiteSetting.enable_s3_backups? && !SiteSetting.s3_disable_cleanup?
-    DiskSpace.reset_cached_stats unless SiteSetting.enable_s3_backups?
-  end
-
-  def s3_bucket
-    return @s3_bucket if @s3_bucket
-    raise Discourse::SiteSettingMissing.new("s3_backup_bucket") if SiteSetting.s3_backup_bucket.blank?
-    @s3_bucket = SiteSetting.s3_backup_bucket.downcase
-  end
-
-  def s3
-    require "s3_helper" unless defined? S3Helper
-    @s3_helper ||= S3Helper.new(s3_bucket, '', S3Helper.s3_options(SiteSetting))
-  end
-
-  def upload_to_s3
-    return unless s3
-    File.open(@path) do |file|
-      s3.upload(file, @filename)
-    end
-  end
-
-  def remove_from_s3
-    return unless s3
-    s3.remove(@filename)
-  end
-
-  def self.base_directory
-    base_directory = File.join(Rails.root, "public", "backups", RailsMultisite::ConnectionManagement.current_db)
-    FileUtils.mkdir_p(base_directory) unless Dir.exists?(base_directory)
-    base_directory
-  end
-
-  def self.chunk_path(identifier, filename, chunk_number)
-    File.join(Backup.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
-  end
-
-  def self.create_from_filename(filename)
-    Backup.new(filename).tap do |b|
-      b.path = File.join(Backup.base_directory, b.filename)
-      b.link = UrlHelper.schemaless "#{Discourse.base_url}/admin/backups/#{b.filename}"
-      b.size = File.size(b.path)
-    end
-  end
-
-  def self.remove_old
-    return if Rails.env.development?
-    all_backups = Backup.all
-    return if all_backups.size <= SiteSetting.maximum_backups
-    all_backups[SiteSetting.maximum_backups..-1].each(&:remove)
-  end
-
-end
@@ -0,0 +1,25 @@
+class BackupFile
+  include ActiveModel::SerializerSupport
+
+  attr_reader :filename,
+              :size,
+              :last_modified,
+              :source
+
+  def initialize(filename:, size:, last_modified:, source: nil)
+    @filename = filename
+    @size = size
+    @last_modified = last_modified
+    @source = source
+  end
+
+  def ==(other)
+    attributes == other.attributes
+  end
+
+  protected
+
+  def attributes
+    [@filename, @size, @last_modified, @source]
+  end
+end
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require_dependency 'enum_site_setting'
+
+class BackupLocationSiteSetting < EnumSiteSetting
+  LOCAL ||= "local"
+  S3 ||= "s3"
+
+  def self.valid_value?(val)
+    values.any? { |v| v[:value] == val }
+  end
+
+  def self.values
+    @values ||= [
+      { name: "admin.backups.location.local", value: LOCAL },
+      { name: "admin.backups.location.s3", value: S3 }
+    ]
+  end
+
+  def self.translate_names?
+    true
+  end
+end
@@ -0,0 +1,5 @@
+class BackupFileSerializer < ApplicationSerializer
+  attributes :filename,
+             :size,
+             :last_modified
+end
@@ -1,3 +0,0 @@
-class BackupSerializer < ApplicationSerializer
-  attributes :filename, :size, :link
-end
@@ -36,7 +36,6 @@ class HandleChunkUpload
   def merge_chunks
     upload_path = @params[:upload_path]
     tmp_upload_path = @params[:tmp_upload_path]
-    model = @params[:model]
     identifier = @params[:identifier]
     filename = @params[:filename]
     tmp_directory = @params[:tmp_directory]
@@ -52,7 +51,7 @@ class HandleChunkUpload
     File.open(tmp_upload_path, "a") do |file|
       (1..@chunk).each do |chunk_number|
         # path to chunk
-        chunk_path = model.chunk_path(identifier, filename, chunk_number)
+        chunk_path = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
         # add chunk to file
         file << File.open(chunk_path).read
       end
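Chunked (resumable) uploads are still assembled on local disk; the merger job shown earlier stitches the parts together from a per-upload tmp directory. A sketch of where a single chunk lands, per the helper used above (identifier and filename are illustrative):

    BackupRestore::LocalBackupStore.chunk_path("upload-abc123", "backup.tar.gz", 1)
    # => "<rails_root>/public/backups/<current_db>/tmp/upload-abc123/backup.tar.gz.part1"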
@@ -3133,6 +3133,7 @@ en:
         label: "Upload"
         title: "Upload a backup to this instance"
         uploading: "Uploading..."
+        uploading_progress: "Uploading... {{progress}}%"
         success: "'{{filename}}' has successfully been uploaded. The file is now being processed and will take up to a minute to show up in the list."
         error: "There has been an error while uploading '{{filename}}': {{message}}"
       operations:
@@ -3163,6 +3164,9 @@ en:
         label: "Rollback"
         title: "Rollback the database to previous working state"
         confirm: "Are you sure you want to rollback the database to the previous working state?"
+      location:
+        local: "Local"
+        s3: "Amazon S3"
 
     export_csv:
       success: "Export initiated, you will be notified via message when the process is complete."
@@ -166,6 +166,7 @@ en:
       other: 'You specified the invalid choices %{name}'
     default_categories_already_selected: "You cannot select a category used in another list."
     s3_upload_bucket_is_required: "You cannot enable uploads to S3 unless you've provided the 's3_upload_bucket'."
+    s3_backup_requires_s3_settings: "You cannot use S3 as backup location unless you've provided the '%{setting_name}'."
    conflicting_google_user_id: 'The Google Account ID for this account has changed; staff intervention is required for security reasons. Please contact staff and point them to <br><a href="https://meta.discourse.org/t/76575">https://meta.discourse.org/t/76575</a>'
 
   activemodel:
@@ -194,6 +195,10 @@ en:
     backup_file_should_be_tar_gz: "The backup file should be a .tar.gz archive."
     not_enough_space_on_disk: "There is not enough space on disk to upload this backup."
     invalid_filename: "The backup filename contains invalid characters. Valid characters are a-z 0-9 . - _."
+    file_exists: "The file you are trying to upload already exists."
+    location:
+      local: "Local"
+      s3: "Amazon S3"
 
   invalid_params: "You supplied invalid parameters to the request: %{message}"
   not_logged_in: "You need to be logged in to do that."
@@ -1359,7 +1364,6 @@ en:
     maximum_backups: "The maximum amount of backups to keep on disk. Older backups are automatically deleted"
     automatic_backups_enabled: "Run automatic backups as defined in backup frequency"
     backup_frequency: "The number of days between backups."
-    enable_s3_backups: "Upload backups to S3 when complete. IMPORTANT: requires valid S3 credentials entered in Files settings."
     s3_backup_bucket: "The remote bucket to hold backups. WARNING: Make sure it is a private bucket."
     s3_endpoint: "The endpoint can be modified to backup to an S3 compatible service like DigitalOcean Spaces or Minio. WARNING: Use default if using AWS S3"
     s3_force_path_style: "Enforce path-style addressing for your custom endpoint. IMPORTANT: Required for using Minio uploads and backups."
@@ -1367,6 +1371,7 @@ en:
     s3_disable_cleanup: "Disable the removal of backups from S3 when removed locally."
     backup_time_of_day: "Time of day UTC when the backup should occur."
     backup_with_uploads: "Include uploads in scheduled backups. Disabling this will only backup the database."
+    backup_location: "Location where backups are stored. IMPORTANT: S3 requires valid S3 credentials entered in Files settings."
 
     active_user_rate_limit_secs: "How frequently we update the 'last_seen_at' field, in seconds"
     verbose_localization: "Show extended localization tips in the UI"
@@ -280,6 +280,7 @@ Discourse::Application.routes.draw do
       put "readonly" => "backups#readonly"
       get "upload" => "backups#check_backup_chunk"
       post "upload" => "backups#upload_backup_chunk"
+      get "upload_url" => "backups#create_upload_url"
     end
   end
 
@@ -1404,6 +1404,12 @@ backups:
   allow_restore:
     default: false
    shadowed_by_global: true
+  backup_location:
+    default: 'local'
+    type: enum
+    enum: 'BackupLocationSiteSetting'
+    shadowed_by_global: true
+    client: true
   maximum_backups:
     client: true
     default: 5
@@ -1416,9 +1422,6 @@ backups:
     max: 30
     default: 7
     shadowed_by_global: true
-  enable_s3_backups:
-    default: false
-    shadowed_by_global: true
   s3_backup_bucket:
     default: ''
     regex: '^[a-z0-9\-\/]+$' # can't use '.' when using HTTPS
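The migration below converts the old boolean setting in place. Judging from its up/down pair, data_type 7 marks an enum setting and 5 a boolean in the site_settings table; only rows where enable_s3_backups was true ('t') become backup_location = 's3', and everything else falls through to the 'local' default. A quick console sanity check after migrating might look like:

    SiteSetting.backup_location        # => "s3" on sites that had enable_s3_backups enabled
    BackupLocationSiteSetting.values   # => [{ name: "admin.backups.location.local", value: "local" }, ...]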
@@ -0,0 +1,33 @@
+class MigrateS3BackupSiteSettings < ActiveRecord::Migration[5.2]
+  def up
+    execute <<~SQL
+      UPDATE site_settings
+      SET name = 'backup_location',
+          data_type = 7,
+          value = 's3'
+      WHERE name = 'enable_s3_backups' AND value = 't';
+    SQL
+
+    execute <<~SQL
+      DELETE
+      FROM site_settings
+      WHERE name = 'enable_s3_backups';
+    SQL
+  end
+
+  def down
+    execute <<~SQL
+      UPDATE site_settings
+      SET name = 'enable_s3_backups',
+          data_type = 5,
+          value = 't'
+      WHERE name = 'backup_location' AND value = 's3';
+    SQL
+
+    execute <<~SQL
+      DELETE
+      FROM site_settings
+      WHERE name = 'backup_location';
+    SQL
+  end
+end
@@ -1,5 +1,5 @@
-require "backup_restore/backuper"
-require "backup_restore/restorer"
+require_dependency "backup_restore/backuper"
+require_dependency "backup_restore/restorer"
 
 module BackupRestore
 
@@ -0,0 +1,74 @@
+module BackupRestore
+  # @abstract
+  class BackupStore
+    class BackupFileExists < RuntimeError; end
+    class StorageError < RuntimeError; end
+
+    # @return [BackupStore]
+    def self.create(opts = {})
+      case SiteSetting.backup_location
+      when BackupLocationSiteSetting::LOCAL
+        require_dependency "backup_restore/local_backup_store"
+        BackupRestore::LocalBackupStore.new(opts)
+      when BackupLocationSiteSetting::S3
+        require_dependency "backup_restore/s3_backup_store"
+        BackupRestore::S3BackupStore.new(opts)
+      end
+    end
+
+    # @return [Array<BackupFile>]
+    def files
+      unsorted_files.sort_by { |file| -file.last_modified.to_i }
+    end
+
+    # @return [BackupFile]
+    def latest_file
+      files.first
+    end
+
+    def delete_old
+      return unless cleanup_allowed?
+      return if (backup_files = files).size <= SiteSetting.maximum_backups
+
+      backup_files[SiteSetting.maximum_backups..-1].each do |file|
+        delete_file(file.filename)
+      end
+    end
+
+    def remote?
+      fail NotImplementedError
+    end
+
+    # @return [BackupFile]
+    def file(filename, include_download_source: false)
+      fail NotImplementedError
+    end
+
+    def delete_file(filename)
+      fail NotImplementedError
+    end
+
+    def download_file(filename, destination, failure_message = nil)
+      fail NotImplementedError
+    end
+
+    def upload_file(filename, source_path, content_type)
+      fail NotImplementedError
+    end
+
+    def generate_upload_url(filename)
+      fail NotImplementedError
+    end
+
+    private
+
+    # @return [Array<BackupFile>]
+    def unsorted_files
+      fail NotImplementedError
+    end
+
+    def cleanup_allowed?
+      true
+    end
+  end
+end
@@ -1,4 +1,5 @@
-require 'disk_space'
+require "disk_space"
+require "mini_mime"
 
 module BackupRestore
 
@@ -10,6 +11,7 @@ module BackupRestore
       @client_id = opts[:client_id]
       @publish_to_message_bus = opts[:publish_to_message_bus] || false
       @with_uploads = opts[:with_uploads].nil? ? true : opts[:with_uploads]
+      @filename_override = opts[:filename]
 
       ensure_no_operation_is_running
       ensure_we_have_a_user
@@ -42,6 +44,7 @@ module BackupRestore
       log "Finalizing backup..."
 
       @with_uploads ? create_archive : move_dump_backup
+      upload_archive
 
       after_create_hook
     rescue SystemExit
@@ -52,9 +55,9 @@ module BackupRestore
         @success = false
       else
         @success = true
-        File.join(@archive_directory, @backup_filename)
+        @backup_filename
       ensure
-        remove_old
+        delete_old
         clean_up
         notify_user
         log "Finished!"
@@ -75,12 +78,14 @@ module BackupRestore
 
     def initialize_state
       @success = false
+      @store = BackupRestore::BackupStore.create
       @current_db = RailsMultisite::ConnectionManagement.current_db
       @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
       @tmp_directory = File.join(Rails.root, "tmp", "backups", @current_db, @timestamp)
       @dump_filename = File.join(@tmp_directory, BackupRestore::DUMP_FILE)
-      @archive_directory = File.join(Rails.root, "public", "backups", @current_db)
-      @archive_basename = File.join(@archive_directory, "#{SiteSetting.title.parameterize}-#{@timestamp}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}")
+      @archive_directory = BackupRestore::LocalBackupStore.base_directory(@current_db)
+      filename = @filename_override || "#{SiteSetting.title.parameterize}-#{@timestamp}"
+      @archive_basename = File.join(@archive_directory, "#{filename}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}")
 
       @backup_filename =
         if @with_uploads
@@ -195,8 +200,10 @@ module BackupRestore
     def move_dump_backup
       log "Finalizing database dump file: #{@backup_filename}"
 
+      archive_filename = File.join(@archive_directory, @backup_filename)
+
       Discourse::Utils.execute_command(
-        'mv', @dump_filename, File.join(@archive_directory, @backup_filename),
+        'mv', @dump_filename, archive_filename,
         failure_message: "Failed to move database dump file."
       )
 
@@ -243,17 +250,30 @@ module BackupRestore
       Discourse::Utils.execute_command('gzip', '-5', tar_filename, failure_message: "Failed to gzip archive.")
     end
 
+    def upload_archive
+      return unless @store.remote?
+
+      log "Uploading archive..."
+      content_type = MiniMime.lookup_by_filename(@backup_filename).content_type
+      archive_path = File.join(@archive_directory, @backup_filename)
+      @store.upload_file(@backup_filename, archive_path, content_type)
+    ensure
+      log "Removing archive from local storage..."
+      FileUtils.remove_file(archive_path, force: true)
+    end
+
     def after_create_hook
       log "Executing the after_create_hook for the backup..."
-      backup = Backup.create_from_filename(@backup_filename)
-      backup.after_create_hook
+      DiscourseEvent.trigger(:backup_created)
     end
 
-    def remove_old
-      log "Removing old backups..."
-      Backup.remove_old
+    def delete_old
+      return if Rails.env.development?
+
+      log "Deleting old backups..."
+      @store.delete_old
     rescue => ex
-      log "Something went wrong while removing old backups.", ex
+      log "Something went wrong while deleting old backups.", ex
     end
 
     def notify_user
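Every call site now goes through the same store interface, so the backuper above never needs to know whether it is talking to disk or to S3. A rough console sketch of the store API introduced earlier (results depend on the backup_location setting):

    store = BackupRestore::BackupStore.create  # LocalBackupStore or S3BackupStore
    store.remote?                              # false for local storage, true for S3
    store.files.map(&:filename)                # BackupFile list, newest first
    store.latest_file&.last_modified           # what the dashboard now reports
    store.delete_old                           # trims to SiteSetting.maximum_backups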
@@ -0,0 +1,65 @@
+require_dependency "backup_restore/backup_store"
+require_dependency "disk_space"
+
+module BackupRestore
+  class LocalBackupStore < BackupStore
+    def self.base_directory(current_db = nil)
+      current_db ||= RailsMultisite::ConnectionManagement.current_db
+      base_directory = File.join(Rails.root, "public", "backups", current_db)
+      FileUtils.mkdir_p(base_directory) unless Dir.exists?(base_directory)
+      base_directory
+    end
+
+    def self.chunk_path(identifier, filename, chunk_number)
+      File.join(LocalBackupStore.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}")
+    end
+
+    def initialize(opts = {})
+      @base_directory = opts[:base_directory] || LocalBackupStore.base_directory
+    end
+
+    def remote?
+      false
+    end
+
+    def file(filename, include_download_source: false)
+      path = path_from_filename(filename)
+      create_file_from_path(path, include_download_source) if File.exists?(path)
+    end
+
+    def delete_file(filename)
+      path = path_from_filename(filename)
+
+      if File.exists?(path)
+        FileUtils.remove_file(path, force: true)
+        DiskSpace.reset_cached_stats
+      end
+    end
+
+    def download_file(filename, destination, failure_message = "")
+      path = path_from_filename(filename)
+      Discourse::Utils.execute_command('cp', path, destination, failure_message: failure_message)
+    end
+
+    private
+
+    def unsorted_files
+      files = Dir.glob(File.join(@base_directory, "*.{gz,tgz}"))
+      files.map! { |filename| create_file_from_path(filename) }
+      files
+    end
+
+    def path_from_filename(filename)
+      File.join(@base_directory, filename)
+    end
+
+    def create_file_from_path(path, include_download_source = false)
+      BackupFile.new(
+        filename: File.basename(path),
+        size: File.size(path),
+        last_modified: File.mtime(path).utc,
+        source: include_download_source ? path : nil
+      )
+    end
+  end
+end
@@ -133,12 +133,12 @@ module BackupRestore
 
     def initialize_state
       @success = false
+      @store = BackupRestore::BackupStore.create
       @db_was_changed = false
       @current_db = RailsMultisite::ConnectionManagement.current_db
       @current_version = BackupRestore.current_version
       @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S")
       @tmp_directory = File.join(Rails.root, "tmp", "restores", @current_db, @timestamp)
-      @source_filename = File.join(Backup.base_directory, @filename)
       @archive_filename = File.join(@tmp_directory, @filename)
       @tar_filename = @archive_filename[0...-3]
       @meta_filename = File.join(@tmp_directory, BackupRestore::METADATA_FILE)
@@ -195,8 +195,15 @@ module BackupRestore
     end
 
     def copy_archive_to_tmp_directory
-      log "Copying archive to tmp directory..."
-      Discourse::Utils.execute_command('cp', @source_filename, @archive_filename, failure_message: "Failed to copy archive to tmp directory.")
+      if @store.remote?
+        log "Downloading archive to tmp directory..."
+        failure_message = "Failed to download archive to tmp directory."
+      else
+        log "Copying archive to tmp directory..."
+        failure_message = "Failed to copy archive to tmp directory."
+      end
+
+      @store.download_file(@filename, @archive_filename, failure_message)
     end
 
     def unzip_archive
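The S3 store that follows hands out short-lived presigned URLs instead of streaming bytes through the Rails process. A sketch of both directions, assuming valid S3 credentials and a configured backup bucket:

    store = BackupRestore::S3BackupStore.new

    # Direct upload: a PUT URL that stays valid for six hours.
    put_url = store.generate_upload_url("my-site-backup.tar.gz")

    # Direct download: `source` is a presigned GET URL (expires after 15 seconds),
    # which the controller's show action redirects to when the store is remote.
    file = store.file("my-site-backup.tar.gz", include_download_source: true)
    file&.source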
@@ -0,0 +1,95 @@
+require_dependency "backup_restore/backup_store"
+require_dependency "s3_helper"
+
+module BackupRestore
+  class S3BackupStore < BackupStore
+    DOWNLOAD_URL_EXPIRES_AFTER_SECONDS ||= 15
+    UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600 # 6 hours
+
+    def initialize(opts = {})
+      s3_options = S3Helper.s3_options(SiteSetting)
+      s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
+      @s3_helper = S3Helper.new(SiteSetting.s3_backup_bucket, '', s3_options)
+    end
+
+    def remote?
+      true
+    end
+
+    def file(filename, include_download_source: false)
+      obj = @s3_helper.object(filename)
+      create_file_from_object(obj, include_download_source) if obj.exists?
+    end
+
+    def delete_file(filename)
+      obj = @s3_helper.object(filename)
+      obj.delete if obj.exists?
+    end
+
+    def download_file(filename, destination_path, failure_message = nil)
+      unless @s3_helper.object(filename).download_file(destination_path)
+        raise failure_message&.to_s || "Failed to download file"
+      end
+    end
+
+    def upload_file(filename, source_path, content_type)
+      obj = @s3_helper.object(filename)
+      raise BackupFileExists.new if obj.exists?
+
+      obj.upload_file(source_path, content_type: content_type)
+    end
+
+    def generate_upload_url(filename)
+      obj = @s3_helper.object(filename)
+      raise BackupFileExists.new if obj.exists?
+
+      presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS)
+    end
+
+    private
+
+    def unsorted_files
+      objects = []
+
+      @s3_helper.list.each do |obj|
+        if obj.key.match?(/\.t?gz$/i)
+          objects << create_file_from_object(obj)
+        end
+      end
+
+      objects
+    rescue Aws::Errors::ServiceError => e
+      Rails.logger.warn("Failed to list backups from S3: #{e.message.presence || e.class.name}")
+      raise StorageError
+    end
+
+    def create_file_from_object(obj, include_download_source = false)
+      BackupFile.new(
+        filename: File.basename(obj.key),
+        size: obj.size,
+        last_modified: obj.last_modified,
+        source: include_download_source ? presigned_url(obj, :get, DOWNLOAD_URL_EXPIRES_AFTER_SECONDS) : nil
+      )
+    end
+
+    def presigned_url(obj, method, expires_in_seconds)
+      ensure_cors!
+      obj.presigned_url(method, expires_in: expires_in_seconds)
+    end
+
+    def ensure_cors!
+      rule = {
+        allowed_headers: ["*"],
+        allowed_methods: ["PUT"],
+        allowed_origins: [Discourse.base_url_no_prefix],
+        max_age_seconds: 3000
+      }
+
+      @s3_helper.ensure_cors!([rule])
+    end
+
+    def cleanup_allowed?
+      !SiteSetting.s3_disable_cleanup
+    end
+  end
+end
|
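A rough usage sketch of the store defined above (the bucket settings and the filename are illustrative; #files comes from the BackupStore base class and is exercised in the specs later in this commit):

# Assumes SiteSetting.s3_backup_bucket and the S3 credentials are configured.
store = BackupRestore::S3BackupStore.new

# Lists only *.gz / *.tgz keys, via unsorted_files above.
store.files.each { |f| puts "#{f.filename} (#{f.size} bytes)" }

# Presigned PUT URL for direct browser uploads; expires after 6 hours and
# raises BackupFileExists if the key is already taken.
url = store.generate_upload_url("2018-09-13-151000.tar.gz")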
@@ -2,8 +2,8 @@ class DiskSpace
   extend ActionView::Helpers::NumberHelper

-  DISK_SPACE_STATS_CACHE_KEY = 'disk_space_stats'.freeze
-  DISK_SPACE_STATS_UPDATED_CACHE_KEY = 'disk_space_stats_updated'.freeze
+  DISK_SPACE_STATS_CACHE_KEY ||= 'disk_space_stats'.freeze
+  DISK_SPACE_STATS_UPDATED_CACHE_KEY ||= 'disk_space_stats_updated'.freeze

   def self.uploads_used_bytes
     # used(uploads_path)
@@ -24,7 +24,7 @@ class DiskSpace
   end

   def self.backups_path
-    Backup.base_directory
+    BackupRestore::LocalBackupStore.base_directory
   end

   def self.uploads_path
@@ -24,6 +24,12 @@ module JsonError
     # If we're passed an array, it's an array of error messages
     return { errors: obj.map(&:to_s) } if obj.is_a?(Array) && obj.present?

+    if obj.is_a?(Exception)
+      message = obj.cause.message.presence || obj.cause.class.name if obj.cause
+      message = obj.message.presence || obj.class.name if message.blank?
+      return { errors: [message] } if message.present?
+    end
+
     # Log a warning (unless obj is nil)
     Rails.logger.warn("create_errors_json called with unrecognized type: #{obj.inspect}") if obj
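The new branch prefers a wrapped cause over the outer exception's message. A hedged sketch of the effect (the method is presumably create_errors_json, per the log line above; the messages are made up):

begin
  begin
    raise "bucket is missing"            # becomes the cause
  rescue
    raise RuntimeError, "upload failed"  # outer exception
  end
rescue => e
  # e.cause is set, so the Exception branch above yields:
  # { errors: ["bucket is missing"] }
end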
@@ -51,7 +51,7 @@ class S3Helper

   # make sure we have a cors config for assets
   # otherwise we will have no fonts
-  def ensure_cors!
+  def ensure_cors!(rules = nil)
     rule = nil

     begin
@@ -63,17 +63,17 @@ class S3Helper
     end

     unless rule
-      puts "installing CORS rule"
+      rules = [{
+        allowed_headers: ["Authorization"],
+        allowed_methods: ["GET", "HEAD"],
+        allowed_origins: ["*"],
+        max_age_seconds: 3000
+      }] if rules.nil?

       s3_resource.client.put_bucket_cors(
         bucket: @s3_bucket_name,
         cors_configuration: {
-          cors_rules: [{
-            allowed_headers: ["Authorization"],
-            allowed_methods: ["GET", "HEAD"],
-            allowed_origins: ["*"],
-            max_age_seconds: 3000
-          }]
+          cors_rules: rules
         }
       )
     end
@@ -137,10 +137,7 @@ class S3Helper
   end

   def list(prefix = "")
-    if @s3_bucket_folder_path.present?
-      prefix = File.join(@s3_bucket_folder_path, prefix)
-    end
+    prefix = get_path_for_s3_upload(prefix)

     s3_bucket.objects(prefix: prefix)
   end
@@ -159,6 +156,11 @@ class S3Helper
     )
   end

+  def object(path)
+    path = get_path_for_s3_upload(path)
+    s3_bucket.object(path)
+  end
+
   def self.s3_options(obj)
     opts = { region: obj.s3_region,
              endpoint: SiteSetting.s3_endpoint,
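ensure_cors! keeps its old default rule when called with no arguments; callers may now supply their own rules, which is how the backup store injects a PUT rule for direct browser uploads. A sketch built only from values shown elsewhere in this commit:

helper = S3Helper.new(SiteSetting.s3_backup_bucket, '', S3Helper.s3_options(SiteSetting))
helper.ensure_cors!([{
  allowed_headers: ["*"],
  allowed_methods: ["PUT"],
  allowed_origins: [Discourse.base_url_no_prefix],
  max_age_seconds: 3000
}])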
@@ -1,8 +1,8 @@
 module SiteSettings; end

 module SiteSettings::Validations
-  def validate_error(key)
-    raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}"))
+  def validate_error(key, opts = {})
+    raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}", opts))
   end

   def validate_default_categories(new_val, default_categories_selected)
@@ -53,4 +53,13 @@ module SiteSettings::Validations
     validate_error :s3_upload_bucket_is_required if new_val == "t" && SiteSetting.s3_upload_bucket.blank?
   end
+
+  def validate_backup_location(new_val)
+    return unless new_val == BackupLocationSiteSetting::S3
+    validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_backup_bucket") if SiteSetting.s3_backup_bucket.blank?
+
+    unless SiteSetting.s3_use_iam_profile
+      validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_access_key_id") if SiteSetting.s3_access_key_id.blank?
+      validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_secret_access_key") if SiteSetting.s3_secret_access_key.blank?
+    end
+  end
 end
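The new validator blocks switching the backup location to S3 while the required S3 settings are blank. Roughly, assuming the validator is wired to the backup_location site setting, as its name suggests:

SiteSetting.s3_backup_bucket = ""
SiteSetting.backup_location = BackupLocationSiteSetting::S3
# => raises Discourse::InvalidParameters with the
#    errors.site_settings.s3_backup_requires_s3_settings message,
#    interpolating setting_name: "s3_backup_bucket"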
@@ -84,6 +84,8 @@ end
 task 's3:upload_assets' => :environment do
   ensure_s3_configured!

+  puts "installing CORS rule"
   helper.ensure_cors!

   assets.each do |asset|
@@ -62,18 +62,20 @@ class DiscourseCLI < Thor
     require "backup_restore/backuper"

     puts "Starting backup..."
-    backuper = BackupRestore::Backuper.new(Discourse.system_user.id)
-    backup = backuper.run
-    if filename.present?
-      puts "Moving '#{backup}' to '#{filename}'"
-      puts "Including version number into '#{filename}'"
-      version_string = File.basename(backup)[/-#{BackupRestore::VERSION_PREFIX}\d{14}/]
-      filename = filename.dup.insert(filename.index('.'), version_string)
-      FileUtils.mv(backup, filename)
-      backup = filename
-    end
+    backuper = BackupRestore::Backuper.new(Discourse.system_user.id, filename: filename)
+    backup_filename = backuper.run
     puts "Backup done."
-    puts "Output file is in: #{backup}", ""
+
+    store = BackupRestore::BackupStore.create
+
+    if store.remote?
+      location = BackupLocationSiteSetting.values.find { |v| v[:value] == SiteSetting.backup_location }
+      location = I18n.t("admin_js.#{location[:name]}") if location
+      puts "Output file is stored on #{location} as #{backup_filename}", ""
+    else
+      backup = store.file(backup_filename, include_download_source: true)
+      puts "Output file is in: #{backup.source}", ""
+    end
+
     exit(1) unless backuper.success
   end
@@ -92,20 +94,22 @@ class DiscourseCLI < Thor
       discourse = './script/discourse'
     end

+    load_rails
+    require "backup_restore/backup_restore"
+    require "backup_restore/restorer"
+    require "backup_restore/backup_store"
+
     if !filename
       puts "You must provide a filename to restore. Did you mean one of the following?\n\n"

-      Dir["public/backups/default/*"].sort_by { |path| File.mtime(path) }.reverse.each do |f|
-        puts "#{discourse} restore #{File.basename(f)}"
+      store = BackupRestore::BackupStore.create
+      store.files.each do |file|
+        puts "#{discourse} restore #{file.filename}"
       end

       return
     end

-    load_rails
-    require "backup_restore/backup_restore"
-    require "backup_restore/restorer"
-
     begin
       puts "Starting restore: #{filename}"
       restorer = BackupRestore::Restorer.new(Discourse.system_user.id, filename: filename)
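Worth noting: BackupFile#source is a local path for the local store but a presigned, short-lived URL for the S3 store (see source_regex in the specs below), which is why the CLI prints it directly only in the local branch. A sketch (filename illustrative):

store = BackupRestore::BackupStore.create
backup = store.file("2018-09-13-151000.tar.gz", include_download_source: true)
puts backup.source  # a local path, or an https://... URL with X-Amz-Expires for S3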
@@ -0,0 +1,56 @@
+require 'rails_helper'
+require 'backup_restore/local_backup_store'
+require_relative 'shared_examples_for_backup_store'
+
+describe BackupRestore::LocalBackupStore do
+  before(:all) do
+    @base_directory = Dir.mktmpdir
+    @paths = []
+  end
+
+  after(:all) do
+    FileUtils.remove_dir(@base_directory, true)
+  end
+
+  before do
+    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
+  end
+
+  subject(:store) { BackupRestore::BackupStore.create(base_directory: @base_directory) }
+  let(:expected_type) { BackupRestore::LocalBackupStore }
+
+  it_behaves_like "backup store"
+
+  it "is not a remote store" do
+    expect(store.remote?).to eq(false)
+  end
+
+  def create_backups
+    create_file(filename: "b.tar.gz", last_modified: "2018-09-13T15:10:00Z", size_in_bytes: 17)
+    create_file(filename: "a.tgz", last_modified: "2018-02-11T09:27:00Z", size_in_bytes: 29)
+    create_file(filename: "r.sql.gz", last_modified: "2017-12-20T03:48:00Z", size_in_bytes: 11)
+    create_file(filename: "no-backup.txt", last_modified: "2018-09-05T14:27:00Z", size_in_bytes: 12)
+  end
+
+  def remove_backups
+    @paths.each { |path| File.delete(path) if File.exists?(path) }
+    @paths.clear
+  end
+
+  def create_file(filename:, last_modified:, size_in_bytes:)
+    path = File.join(@base_directory, filename)
+    return if File.exists?(path)
+
+    @paths << path
+    FileUtils.touch(path)
+    File.truncate(path, size_in_bytes)
+
+    time = Time.parse(last_modified)
+    File.utime(time, time, path)
+  end
+
+  def source_regex(filename)
+    path = File.join(@base_directory, filename)
+    /^#{Regexp.escape(path)}$/
+  end
+end
@@ -0,0 +1,109 @@
+require 'rails_helper'
+require 'backup_restore/s3_backup_store'
+require_relative 'shared_examples_for_backup_store'
+
+describe BackupRestore::S3BackupStore do
+  before(:all) do
+    @s3_client = Aws::S3::Client.new(stub_responses: true)
+    @s3_options = { client: @s3_client }
+
+    @objects = []
+
+    @s3_client.stub_responses(:list_objects, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+      expect(context.params[:prefix]).to be_blank
+
+      { contents: @objects }
+    end)
+
+    @s3_client.stub_responses(:delete_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+      expect do
+        @objects.delete_if { |obj| obj[:key] == context.params[:key] }
+      end.to change { @objects }
+    end)
+
+    @s3_client.stub_responses(:head_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+
+      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
+        { content_length: object[:size], last_modified: object[:last_modified] }
+      else
+        { status_code: 404, headers: {}, body: "", }
+      end
+    end)
+
+    @s3_client.stub_responses(:get_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+
+      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
+        { content_length: object[:size], body: "A" * object[:size] }
+      else
+        { status_code: 404, headers: {}, body: "", }
+      end
+    end)
+
+    @s3_client.stub_responses(:put_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+
+      @objects << {
+        key: context.params[:key],
+        size: context.params[:body].size,
+        last_modified: Time.zone.now
+      }
+    end)
+  end
+
+  before do
+    SiteSetting.s3_backup_bucket = "s3-backup-bucket"
+    SiteSetting.s3_access_key_id = "s3-access-key-id"
+    SiteSetting.s3_secret_access_key = "s3-secret-access-key"
+    SiteSetting.backup_location = BackupLocationSiteSetting::S3
+  end
+
+  subject(:store) { BackupRestore::BackupStore.create(s3_options: @s3_options) }
+  let(:expected_type) { BackupRestore::S3BackupStore }
+
+  it_behaves_like "backup store"
+  it_behaves_like "remote backup store"
+
+  context "S3 specific behavior" do
+    before { create_backups }
+    after(:all) { remove_backups }
+
+    it "doesn't delete files when cleanup is disabled" do
+      SiteSetting.maximum_backups = 1
+      SiteSetting.s3_disable_cleanup = true
+
+      expect { store.delete_old }.to_not change { store.files }
+    end
+  end
+
+  def create_backups
+    @objects.clear
+    @objects << { key: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
+    @objects << { key: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z") }
+    @objects << { key: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z") }
+    @objects << { key: "no-backup.txt", size: 12, last_modified: Time.parse("2018-09-05T14:27:00Z") }
+  end
+
+  def remove_backups
+    @objects.clear
+  end
+
+  def source_regex(filename)
+    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
+    filename = Regexp.escape(filename)
+    expires = BackupRestore::S3BackupStore::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS
+
+    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
+  end
+
+  def upload_url_regex(filename)
+    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
+    filename = Regexp.escape(filename)
+    expires = BackupRestore::S3BackupStore::UPLOAD_URL_EXPIRES_AFTER_SECONDS
+
+    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
+  end
+end
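This spec never talks to AWS: stub_responses: true makes every client operation return canned data instead of issuing HTTP calls. A minimal standalone sketch of that aws-sdk facility (bucket name illustrative):

client = Aws::S3::Client.new(stub_responses: true)
client.stub_responses(:list_objects, contents: [{ key: "b.tar.gz", size: 17 }])

resp = client.list_objects(bucket: "any-bucket")  # no network I/O
resp.contents.map(&:key)                          # => ["b.tar.gz"]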
@@ -0,0 +1,176 @@
+shared_context "backups" do
+  before { create_backups }
+  after(:all) { remove_backups }
+
+  let(:backup1) { BackupFile.new(filename: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z")) }
+  let(:backup2) { BackupFile.new(filename: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z")) }
+  let(:backup3) { BackupFile.new(filename: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z")) }
+end
+
+shared_examples "backup store" do
+  it "creates the correct backup store" do
+    expect(store).to be_a(expected_type)
+  end
+
+  context "without backup files" do
+    describe "#files" do
+      it "returns an empty array when there are no files" do
+        expect(store.files).to be_empty
+      end
+    end
+
+    describe "#latest_file" do
+      it "returns nil when there are no files" do
+        expect(store.latest_file).to be_nil
+      end
+    end
+  end
+
+  context "with backup files" do
+    include_context "backups"
+
+    describe "#files" do
+      it "sorts files by last modified date in descending order" do
+        expect(store.files).to eq([backup1, backup2, backup3])
+      end
+
+      it "returns only *.gz and *.tgz files" do
+        files = store.files
+        expect(files).to_not be_empty
+        expect(files.map(&:filename)).to contain_exactly(backup1.filename, backup2.filename, backup3.filename)
+      end
+    end
+
+    describe "#latest_file" do
+      it "returns the most recent backup file" do
+        expect(store.latest_file).to eq(backup1)
+      end
+
+      it "returns nil when there are no files" do
+        store.files.each { |file| store.delete_file(file.filename) }
+        expect(store.latest_file).to be_nil
+      end
+    end
+
+    describe "#delete_old" do
+      it "does nothing if the number of files is <= maximum_backups" do
+        SiteSetting.maximum_backups = 3
+
+        store.delete_old
+        expect(store.files).to eq([backup1, backup2, backup3])
+      end
+
+      it "deletes files starting by the oldest" do
+        SiteSetting.maximum_backups = 1
+
+        store.delete_old
+        expect(store.files).to eq([backup1])
+      end
+    end
+
+    describe "#file" do
+      it "returns information about the file when the file exists" do
+        expect(store.file(backup1.filename)).to eq(backup1)
+      end
+
+      it "returns nil when the file doesn't exist" do
+        expect(store.file("foo.gz")).to be_nil
+      end
+
+      it "includes the file's source location if it is requested" do
+        file = store.file(backup1.filename, include_download_source: true)
+        expect(file.source).to match(source_regex(backup1.filename))
+      end
+    end
+
+    describe "#delete_file" do
+      it "deletes file when the file exists" do
+        expect(store.files).to include(backup1)
+        store.delete_file(backup1.filename)
+        expect(store.files).to_not include(backup1)
+
+        expect(store.file(backup1.filename)).to be_nil
+      end
+
+      it "does nothing when the file doesn't exist" do
+        expect { store.delete_file("foo.gz") }.to_not change { store.files }
+      end
+    end
+
+    describe "#download_file" do
+      it "downloads file to the destination" do
+        filename = backup1.filename
+
+        Dir.mktmpdir do |path|
+          destination_path = File.join(path, File.basename(filename))
+          store.download_file(filename, destination_path)
+
+          expect(File.exists?(destination_path)).to eq(true)
+          expect(File.size(destination_path)).to eq(backup1.size)
+        end
+      end
+
+      it "raises an exception when the download fails" do
+        filename = backup1.filename
+        destination_path = Dir.mktmpdir { |path| File.join(path, File.basename(filename)) }
+
+        expect { store.download_file(filename, destination_path) }.to raise_exception(StandardError)
+      end
+    end
+  end
+end
+
+shared_examples "remote backup store" do
+  it "is a remote store" do
+    expect(store.remote?).to eq(true)
+  end
+
+  context "with backups" do
+    include_context "backups"
+
+    describe "#upload_file" do
+      it "uploads file into store" do
+        freeze_time
+
+        backup = BackupFile.new(
+          filename: "foo.tar.gz",
+          size: 33,
+          last_modified: Time.zone.now
+        )
+
+        expect(store.files).to_not include(backup)
+
+        Tempfile.create(backup.filename) do |file|
+          file.write("A" * backup.size)
+          file.close
+
+          store.upload_file(backup.filename, file.path, "application/gzip")
+        end
+
+        expect(store.files).to include(backup)
+        expect(store.file(backup.filename)).to eq(backup)
+      end
+
+      it "raises an exception when a file with same filename exists" do
+        Tempfile.create(backup1.filename) do |file|
+          expect { store.upload_file(backup1.filename, file.path, "application/gzip") }
+            .to raise_exception(BackupRestore::BackupStore::BackupFileExists)
+        end
+      end
+    end
+
+    describe "#generate_upload_url" do
+      it "generates upload URL" do
+        filename = "foo.tar.gz"
+        url = store.generate_upload_url(filename)
+
+        expect(url).to match(upload_url_regex(filename))
+      end
+
+      it "raises an exception when a file with same filename exists" do
+        expect { store.generate_upload_url(backup1.filename) }
+          .to raise_exception(BackupRestore::BackupStore::BackupFileExists)
+      end
+    end
+  end
+end
@@ -210,9 +210,9 @@ describe AdminDashboardData
   end

   context 'when setting is enabled' do
-    let(:setting_enabled) { true }
     before do
-      SiteSetting.public_send("#{setting_key}=", setting_enabled)
+      all_setting_keys.each { |key| SiteSetting.public_send("#{key}=", 'foo') }
+      SiteSetting.public_send("#{setting[:key]}=", setting[:enabled_value])
       SiteSetting.public_send("#{bucket_key}=", bucket_value)
     end
@@ -229,7 +229,7 @@ describe AdminDashboardData
   context 'when bucket is filled in' do
     let(:bucket_value) { 'a' }
     before do
-      SiteSetting.public_send("s3_use_iam_profile=", use_iam_profile)
+      SiteSetting.s3_use_iam_profile = use_iam_profile
     end

     context 'when using iam profile' do
@@ -260,7 +260,7 @@ describe AdminDashboardData
   context 'when setting is not enabled' do
     before do
-      SiteSetting.public_send("#{setting_key}=", false)
+      SiteSetting.public_send("#{setting[:key]}=", setting[:disabled_value])
     end

     it "always returns nil" do
@@ -272,13 +272,25 @@ describe AdminDashboardData
   end

   describe 'uploads' do
-    let(:setting_key) { :enable_s3_uploads }
+    let(:setting) do
+      {
+        key: :enable_s3_uploads,
+        enabled_value: true,
+        disabled_value: false
+      }
+    end
     let(:bucket_key) { :s3_upload_bucket }
     include_examples 'problem detection for s3-dependent setting'
   end

   describe 'backups' do
-    let(:setting_key) { :enable_s3_backups }
+    let(:setting) do
+      {
+        key: :backup_location,
+        enabled_value: BackupLocationSiteSetting::S3,
+        disabled_value: BackupLocationSiteSetting::LOCAL
+      }
+    end
     let(:bucket_key) { :s3_backup_bucket }
     include_examples 'problem detection for s3-dependent setting'
   end
@@ -1,130 +0,0 @@
-require 'rails_helper'
-require "s3_helper"
-
-require_dependency 'backup'
-
-describe Backup do
-
-  let(:b1) { Backup.new('backup1') }
-  let(:b2) { Backup.new('backup2') }
-  let(:b3) { Backup.new('backup3') }
-
-  before do
-    Backup.stubs(:all).returns([b1, b2, b3])
-  end
-
-  context '#remove_old' do
-    it "does nothing if there aren't more backups than the setting" do
-      SiteSetting.maximum_backups = 3
-      Backup.any_instance.expects(:remove).never
-      Backup.remove_old
-    end
-
-    it "calls remove on the backups over our limit" do
-      SiteSetting.maximum_backups = 1
-      b1.expects(:remove).never
-      b2.expects(:remove).once
-      b3.expects(:remove).once
-      Backup.remove_old
-    end
-  end
-
-  shared_context 's3 helpers' do
-    let(:client) { Aws::S3::Client.new(stub_responses: true) }
-    let(:resource) { Aws::S3::Resource.new(client: client) }
-    let!(:s3_bucket) { resource.bucket("s3-upload-bucket") }
-    let(:s3_helper) { b1.s3 }
-
-    before(:each) do
-      SiteSetting.s3_backup_bucket = "s3-upload-bucket"
-      SiteSetting.s3_access_key_id = "s3-access-key-id"
-      SiteSetting.s3_secret_access_key = "s3-secret-access-key"
-    end
-  end
-
-  context ".after_create_hook" do
-    context "when SiteSetting is true" do
-      include_context "s3 helpers"
-
-      before do
-        SiteSetting.enable_s3_backups = true
-      end
-
-      it "should upload the backup to S3 with the right paths" do
-        b1.path = 'some/path/backup.gz'
-        File.expects(:open).with(b1.path).yields(stub)
-
-        s3_helper.expects(:s3_bucket).returns(s3_bucket)
-        s3_object = stub
-
-        s3_bucket.expects(:object).with(b1.filename).returns(s3_object)
-        s3_object.expects(:upload_file)
-
-        b1.after_create_hook
-      end
-
-      context "when s3_backup_bucket includes folders path" do
-        before do
-          SiteSetting.s3_backup_bucket = "s3-upload-bucket/discourse-backups"
-        end
-
-        it "should upload the backup to S3 with the right paths" do
-          b1.path = 'some/path/backup.gz'
-          File.expects(:open).with(b1.path).yields(stub)
-
-          s3_helper.expects(:s3_bucket).returns(s3_bucket)
-          s3_object = stub
-
-          s3_bucket.expects(:object).with("discourse-backups/#{b1.filename}").returns(s3_object)
-          s3_object.expects(:upload_file)
-
-          b1.after_create_hook
-        end
-      end
-    end
-
-    it "calls upload_to_s3 if the SiteSetting is false" do
-      SiteSetting.enable_s3_backups = false
-      b1.expects(:upload_to_s3).never
-      b1.after_create_hook
-    end
-  end
-
-  context ".after_remove_hook" do
-    include_context "s3 helpers"
-
-    context "when SiteSetting is true" do
-      before do
-        SiteSetting.enable_s3_backups = true
-      end
-
-      context "when s3_backup_bucket includes folders path" do
-        before do
-          SiteSetting.s3_backup_bucket = "s3-upload-bucket/discourse-backups"
-        end
-
-        it "should upload the backup to S3 with the right paths" do
-          s3_helper.expects(:s3_bucket).returns(s3_bucket)
-          s3_object = stub
-
-          s3_bucket.expects(:object).with("discourse-backups/#{b1.filename}").returns(s3_object)
-          s3_object.expects(:delete)
-
-          b1.after_remove_hook
-        end
-      end
-    end
-
-    context "when SiteSetting is false" do
-      before do
-        SiteSetting.enable_s3_backups = false
-      end
-
-      it "doesn't call remove_from_s3" do
-        b1.expects(:remove_from_s3).never
-        b1.after_remove_hook
-      end
-    end
-  end
-
-end
@@ -4,6 +4,25 @@ RSpec.describe Admin::BackupsController do
   let(:admin) { Fabricate(:admin) }
   let(:backup_filename) { "2014-02-10-065935.tar.gz" }
   let(:backup_filename2) { "2014-02-11-065935.tar.gz" }
+  let(:store) { BackupRestore::LocalBackupStore.new }
+
+  def create_backup_files(*filenames)
+    @paths = filenames.map do |filename|
+      path = backup_path(filename)
+      File.open(path, "w") { |f| f.write("test backup") }
+      path
+    end
+  end
+
+  def backup_path(filename)
+    File.join(BackupRestore::LocalBackupStore.base_directory, filename)
+  end
+
+  def map_preloaded
+    controller.instance_variable_get("@preloaded").map do |key, value|
+      [key, JSON.parse(value)]
+    end.to_h
+  end
+
   it "is a subclass of AdminController" do
     expect(Admin::BackupsController < Admin::AdminController).to eq(true)
@@ -11,10 +30,14 @@ RSpec.describe Admin::BackupsController do

   before do
     sign_in(admin)
+    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
   end

   after do
     $redis.flushall
+
+    @paths&.each { |path| File.delete(path) if File.exists?(path) }
+    @paths = nil
   end

   describe "#index" do
@@ -29,11 +52,7 @@ RSpec.describe Admin::BackupsController do
       get "/admin/backups.html"
       expect(response.status).to eq(200)

-      preloaded = controller.instance_variable_get("@preloaded").map do |key, value|
-        [key, JSON.parse(value)]
-      end.to_h
-
-      expect(preloaded["backups"].size).to eq(Backup.all.size)
+      preloaded = map_preloaded
       expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
       expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
     end
@@ -42,23 +61,14 @@ RSpec.describe Admin::BackupsController do
     context "json format" do
       it "returns a list of all the backups" do
         begin
-          paths = []
-          [backup_filename, backup_filename2].each do |name|
-            path = File.join(Backup.base_directory, name)
-            paths << path
-            File.open(path, "w") { |f| f.write("hello") }
-            Backup.create_from_filename(name)
-          end
+          create_backup_files(backup_filename, backup_filename2)

           get "/admin/backups.json"
           expect(response.status).to eq(200)

-          json = JSON.parse(response.body).map { |backup| backup["filename"] }
-          expect(json).to include(backup_filename)
-          expect(json).to include(backup_filename2)
-        ensure
-          paths.each { |path| File.delete(path) }
+          filenames = JSON.parse(response.body).map { |backup| backup["filename"] }
+          expect(filenames).to include(backup_filename)
+          expect(filenames).to include(backup_filename2)
         end
       end
     end
@@ -88,36 +98,23 @@ RSpec.describe Admin::BackupsController do
      it "uses send_file to transmit the backup" do
        begin
          token = EmailBackupToken.set(admin.id)
-         path = File.join(Backup.base_directory, backup_filename)
-         File.open(path, "w") { |f| f.write("hello") }
-
-         Backup.create_from_filename(backup_filename)
+         create_backup_files(backup_filename)

          expect do
            get "/admin/backups/#{backup_filename}.json", params: { token: token }
          end.to change { UserHistory.where(action: UserHistory.actions[:backup_download]).count }.by(1)

-         expect(response.headers['Content-Length']).to eq("5")
+         expect(response.headers['Content-Length']).to eq("11")
          expect(response.headers['Content-Disposition']).to match(/attachment; filename/)
-       ensure
-         File.delete(path)
-         EmailBackupToken.del(admin.id)
        end
      end

      it "returns 422 when token is bad" do
        begin
-         path = File.join(Backup.base_directory, backup_filename)
-         File.open(path, "w") { |f| f.write("hello") }
-
-         Backup.create_from_filename(backup_filename)
-
          get "/admin/backups/#{backup_filename}.json", params: { token: "bad_value" }

          expect(response.status).to eq(422)
          expect(response.headers['Content-Disposition']).not_to match(/attachment; filename/)
-       ensure
-         File.delete(path)
        end
      end
@@ -125,20 +122,16 @@ RSpec.describe Admin::BackupsController do
        token = EmailBackupToken.set(admin.id)
        get "/admin/backups/#{backup_filename}.json", params: { token: token }

-       EmailBackupToken.del(admin.id)
        expect(response.status).to eq(404)
      end
    end

    describe '#destroy' do
-     let(:b) { Backup.new(backup_filename) }
-
      it "removes the backup if found" do
        begin
-         path = File.join(Backup.base_directory, backup_filename)
-         File.open(path, "w") { |f| f.write("hello") }
-
-         Backup.create_from_filename(backup_filename)
-
+         path = backup_path(backup_filename)
+         create_backup_files(backup_filename)
+         expect(File.exists?(path)).to eq(true)

          expect do
            delete "/admin/backups/#{backup_filename}.json"
@@ -146,8 +139,6 @@ RSpec.describe Admin::BackupsController do

          expect(response.status).to eq(200)
          expect(File.exists?(path)).to eq(false)
-       ensure
-         File.delete(path) if File.exists?(path)
        end
      end
@@ -162,9 +153,7 @@ RSpec.describe Admin::BackupsController do
      get "/admin/backups/logs.html"
      expect(response.status).to eq(200)

-     preloaded = controller.instance_variable_get("@preloaded").map do |key, value|
-       [key, JSON.parse(value)]
-     end.to_h
-
+     preloaded = map_preloaded
      expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
      expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
@@ -228,6 +217,7 @@ RSpec.describe Admin::BackupsController do
      described_class.any_instance.expects(:has_enough_space_on_disk?).returns(true)

      filename = 'test_Site-0123456789.tar.gz'
+     @paths = [backup_path(File.join('tmp', 'test', "#{filename}.part1"))]

      post "/admin/backups/upload.json", params: {
        resumableFilename: filename,
@@ -241,13 +231,6 @@ RSpec.describe Admin::BackupsController do

      expect(response.status).to eq(200)
      expect(response.body).to eq("")
-   ensure
-     begin
-       File.delete(
-         File.join(Backup.base_directory, 'tmp', 'test', "#{filename}.part1")
-       )
-     rescue Errno::ENOENT
-     end
    end
  end
 end
@@ -284,18 +267,16 @@ RSpec.describe Admin::BackupsController do
  end

  describe "#email" do
-   let(:backup_filename) { "test.tar.gz" }
-   let(:backup) { Backup.new(backup_filename) }
-
    it "enqueues email job" do
-     Backup.expects(:[]).with(backup_filename).returns(backup)
-
-     Jobs.expects(:enqueue).with(:download_backup_email,
-       user_id: admin.id,
-       backup_file_path: 'http://www.example.com/admin/backups/test.tar.gz'
-     )
-
-     put "/admin/backups/#{backup_filename}.json"
+     create_backup_files(backup_filename)
+
+     expect {
+       put "/admin/backups/#{backup_filename}.json"
+     }.to change { Jobs::DownloadBackupEmail.jobs.size }.by(1)
+
+     job_args = Jobs::DownloadBackupEmail.jobs.last["args"].first
+     expect(job_args["user_id"]).to eq(admin.id)
+     expect(job_args["backup_file_path"]).to eq("http://www.example.com/admin/backups/#{backup_filename}")

      expect(response.status).to eq(200)
    end
@@ -109,6 +109,14 @@ QUnit.test("ensures an authorized upload", assert => {
   );
 });

+QUnit.test("skipping validation works", assert => {
+  const files = [{ name: "backup.tar.gz" }];
+  sandbox.stub(bootbox, "alert");
+
+  assert.not(validUpload(files, { skipValidation: false }));
+  assert.ok(validUpload(files, { skipValidation: true }));
+});
+
 QUnit.test("staff can upload anything in PM", assert => {
   const files = [{ name: "some.docx" }];
   Discourse.SiteSettings.authorized_extensions = "jpeg";