From 84d4c81a26bb585f67809c6f8af25c2a385de754 Mon Sep 17 00:00:00 2001 From: Guo Xiang Tan Date: Mon, 15 Oct 2018 09:43:31 +0800 Subject: [PATCH] FEATURE: Support backup uploads/downloads directly to/from S3. This reverts commit 3c59106bac4d79f39981bda3ff9db7786c1a78a0. --- .../controllers/admin-backups-index.js.es6 | 6 + .../javascripts/admin/models/backup.js.es6 | 7 +- .../admin/routes/admin-backups.js.es6 | 9 + .../admin/templates/backups-index.hbs | 9 +- .../components/watched-word-uploader.hbs | 2 +- .../components/backup-uploader.js.es6 | 51 +++++ .../discourse/lib/utilities.js.es6 | 1 + .../discourse/mixins/upload.js.es6 | 53 ++++-- .../templates/components/avatar-uploader.hbs | 2 +- .../templates/components/backup-uploader.hbs | 4 + .../templates/components/csv-uploader.hbs | 2 +- .../templates/components/emoji-uploader.hbs | 2 +- .../templates/components/image-uploader.hbs | 2 +- .../templates/components/images-uploader.hbs | 2 +- .../components/wizard-field-image.hbs | 2 +- .../stylesheets/common/base/upload.scss | 5 + app/assets/stylesheets/wizard.scss | 5 + app/controllers/admin/backups_controller.rb | 100 +++++++--- .../admin/dashboard_next_controller.rb | 8 +- app/jobs/regular/backup_chunks_merger.rb | 19 +- app/jobs/scheduled/schedule_backup.rb | 5 +- app/models/admin_dashboard_data.rb | 2 +- app/models/backup.rb | 92 --------- app/models/backup_file.rb | 25 +++ app/models/backup_location_site_setting.rb | 23 +++ app/serializers/backup_file_serializer.rb | 5 + app/serializers/backup_serializer.rb | 3 - app/services/handle_chunk_upload.rb | 3 +- config/locales/client.en.yml | 4 + config/locales/server.en.yml | 7 +- config/routes.rb | 1 + config/site_settings.yml | 9 +- ...6195601_migrate_s3_backup_site_settings.rb | 33 ++++ lib/backup_restore/backup_restore.rb | 4 +- lib/backup_restore/backup_store.rb | 74 ++++++++ lib/backup_restore/backuper.rb | 48 +++-- lib/backup_restore/local_backup_store.rb | 65 +++++++ lib/backup_restore/restorer.rb | 13 +- lib/backup_restore/s3_backup_store.rb | 95 ++++++++++ lib/disk_space.rb | 6 +- lib/json_error.rb | 6 + lib/s3_helper.rb | 26 +-- lib/site_settings/validations.rb | 13 +- lib/tasks/s3.rake | 2 + script/discourse | 38 ++-- .../backup_restore/local_backup_store_spec.rb | 56 ++++++ .../backup_restore/s3_backup_store_spec.rb | 109 +++++++++++ .../shared_examples_for_backup_store.rb | 176 ++++++++++++++++++ spec/models/admin_dashboard_data_spec.rb | 24 ++- spec/models/backup_spec.rb | 130 ------------- .../requests/admin/backups_controller_spec.rb | 103 +++++----- test/javascripts/lib/utilities-test.js.es6 | 8 + 52 files changed, 1079 insertions(+), 420 deletions(-) create mode 100644 app/assets/javascripts/discourse/components/backup-uploader.js.es6 create mode 100644 app/assets/javascripts/discourse/templates/components/backup-uploader.hbs delete mode 100644 app/models/backup.rb create mode 100644 app/models/backup_file.rb create mode 100644 app/models/backup_location_site_setting.rb create mode 100644 app/serializers/backup_file_serializer.rb delete mode 100644 app/serializers/backup_serializer.rb create mode 100644 db/migrate/20180916195601_migrate_s3_backup_site_settings.rb create mode 100644 lib/backup_restore/backup_store.rb create mode 100644 lib/backup_restore/local_backup_store.rb create mode 100644 lib/backup_restore/s3_backup_store.rb create mode 100644 spec/lib/backup_restore/local_backup_store_spec.rb create mode 100644 spec/lib/backup_restore/s3_backup_store_spec.rb create mode 100644 
spec/lib/backup_restore/shared_examples_for_backup_store.rb delete mode 100644 spec/models/backup_spec.rb diff --git a/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6 b/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6 index 49e8d9d4412..6448c2565a6 100644 --- a/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6 +++ b/app/assets/javascripts/admin/controllers/admin-backups-index.js.es6 @@ -1,9 +1,15 @@ import { ajax } from "discourse/lib/ajax"; +import computed from "ember-addons/ember-computed-decorators"; export default Ember.Controller.extend({ adminBackups: Ember.inject.controller(), status: Ember.computed.alias("adminBackups.model"), + @computed + localBackupStorage() { + return this.siteSettings.backup_location === "local"; + }, + uploadLabel: function() { return I18n.t("admin.backups.upload.label"); }.property(), diff --git a/app/assets/javascripts/admin/models/backup.js.es6 b/app/assets/javascripts/admin/models/backup.js.es6 index dd6e8046e15..0f6663065dc 100644 --- a/app/assets/javascripts/admin/models/backup.js.es6 +++ b/app/assets/javascripts/admin/models/backup.js.es6 @@ -1,5 +1,4 @@ import { ajax } from "discourse/lib/ajax"; -import PreloadStore from "preload-store"; const Backup = Discourse.Model.extend({ destroy() { @@ -16,9 +15,9 @@ const Backup = Discourse.Model.extend({ Backup.reopenClass({ find() { - return PreloadStore.getAndRemove("backups", () => - ajax("/admin/backups.json") - ).then(backups => backups.map(backup => Backup.create(backup))); + return ajax("/admin/backups.json").then(backups => + backups.map(backup => Backup.create(backup)) + ); }, start(withUploads) { diff --git a/app/assets/javascripts/admin/routes/admin-backups.js.es6 b/app/assets/javascripts/admin/routes/admin-backups.js.es6 index a0b9342c619..4514eec2054 100644 --- a/app/assets/javascripts/admin/routes/admin-backups.js.es6 +++ b/app/assets/javascripts/admin/routes/admin-backups.js.es6 @@ -151,6 +151,15 @@ export default Discourse.Route.extend({ message: message }) ); + }, + + remoteUploadSuccess() { + Backup.find().then(backups => { + this.controllerFor("adminBackupsIndex").set( + "model", + backups.map(backup => Backup.create(backup)) + ); + }); } } }); diff --git a/app/assets/javascripts/admin/templates/backups-index.hbs b/app/assets/javascripts/admin/templates/backups-index.hbs index 3b20c7935e6..bcf8dccaa4d 100644 --- a/app/assets/javascripts/admin/templates/backups-index.hbs +++ b/app/assets/javascripts/admin/templates/backups-index.hbs @@ -1,5 +1,10 @@
- {{resumable-upload target="/admin/backups/upload" success="uploadSuccess" error="uploadError" uploadText=uploadLabel title="admin.backups.upload.title"}} + {{#if localBackupStorage}} + {{resumable-upload target="/admin/backups/upload" success="uploadSuccess" error="uploadError" uploadText=uploadLabel title="admin.backups.upload.title"}} + {{else}} + {{backup-uploader done="remoteUploadSuccess"}} + {{/if}} + {{#if site.isReadOnly}} {{d-button icon="eye" action="toggleReadOnlyMode" disabled=status.isOperationRunning title="admin.backups.read_only.disable.title" label="admin.backups.read_only.disable.label"}} {{else}} @@ -10,7 +15,7 @@ {{i18n 'admin.backups.columns.filename'}} {{i18n 'admin.backups.columns.size'}} - + {{#each model as |backup|}} diff --git a/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs b/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs index caa74a86152..e69083494fb 100644 --- a/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs +++ b/app/assets/javascripts/admin/templates/components/watched-word-uploader.hbs @@ -1,7 +1,7 @@
One word per line diff --git a/app/assets/javascripts/discourse/components/backup-uploader.js.es6 b/app/assets/javascripts/discourse/components/backup-uploader.js.es6 new file mode 100644 index 00000000000..3b54695e3d0 --- /dev/null +++ b/app/assets/javascripts/discourse/components/backup-uploader.js.es6 @@ -0,0 +1,51 @@ +import { ajax } from "discourse/lib/ajax"; +import computed from "ember-addons/ember-computed-decorators"; +import UploadMixin from "discourse/mixins/upload"; + +export default Em.Component.extend(UploadMixin, { + tagName: "span", + + @computed("uploading", "uploadProgress") + uploadButtonText(uploading, progress) { + return uploading + ? I18n.t("admin.backups.upload.uploading_progress", { progress }) + : I18n.t("admin.backups.upload.label"); + }, + + validateUploadedFilesOptions() { + return { skipValidation: true }; + }, + + uploadDone() { + this.sendAction("done"); + }, + + calculateUploadUrl() { + return ""; + }, + + uploadOptions() { + return { + type: "PUT", + dataType: "xml", + autoUpload: false + }; + }, + + _init: function() { + const $upload = this.$(); + + $upload.on("fileuploadadd", (e, data) => { + ajax("/admin/backups/upload_url", { + data: { filename: data.files[0].name } + }).then(result => { + if (!result.success) { + bootbox.alert(result.message); + } else { + data.url = result.url; + data.submit(); + } + }); + }); + }.on("didInsertElement") +}); diff --git a/app/assets/javascripts/discourse/lib/utilities.js.es6 b/app/assets/javascripts/discourse/lib/utilities.js.es6 index 9ab42d57806..0ab5c4c933e 100644 --- a/app/assets/javascripts/discourse/lib/utilities.js.es6 +++ b/app/assets/javascripts/discourse/lib/utilities.js.es6 @@ -225,6 +225,7 @@ export function validateUploadedFiles(files, opts) { } export function validateUploadedFile(file, opts) { + if (opts.skipValidation) return true; if (!authorizesOneOrMoreExtensions()) return false; opts = opts || {}; diff --git a/app/assets/javascripts/discourse/mixins/upload.js.es6 b/app/assets/javascripts/discourse/mixins/upload.js.es6 index 69925003069..6ff81b8ec05 100644 --- a/app/assets/javascripts/discourse/mixins/upload.js.es6 +++ b/app/assets/javascripts/discourse/mixins/upload.js.es6 @@ -2,6 +2,7 @@ import { displayErrorForUpload, validateUploadedFiles } from "discourse/lib/utilities"; +import getUrl from "discourse-common/lib/get-url"; export default Em.Mixin.create({ uploading: false, @@ -15,13 +16,25 @@ export default Em.Mixin.create({ return {}; }, + calculateUploadUrl() { + return ( + getUrl(this.getWithDefault("uploadUrl", "/uploads")) + + ".json?client_id=" + + this.messageBus.clientId + + "&authenticity_token=" + + encodeURIComponent(Discourse.Session.currentProp("csrfToken")) + ); + }, + + uploadOptions() { + return {}; + }, + _initialize: function() { - const $upload = this.$(), - csrf = Discourse.Session.currentProp("csrfToken"), - uploadUrl = Discourse.getURL( - this.getWithDefault("uploadUrl", "/uploads") - ), - reset = () => this.setProperties({ uploading: false, uploadProgress: 0 }); + const $upload = this.$(); + const reset = () => + this.setProperties({ uploading: false, uploadProgress: 0 }); + const maxFiles = this.getWithDefault("maxFiles", 10); $upload.on("fileuploaddone", (e, data) => { let upload = data.result; @@ -29,20 +42,21 @@ export default Em.Mixin.create({ reset(); }); - $upload.fileupload({ - url: - uploadUrl + - ".json?client_id=" + - this.messageBus.clientId + - "&authenticity_token=" + - encodeURIComponent(csrf), - dataType: "json", - dropZone: $upload, - pasteZone: $upload - 
}); + $upload.fileupload( + _.merge( + { + url: this.calculateUploadUrl(), + dataType: "json", + replaceFileInput: false, + dropZone: $upload, + pasteZone: $upload + }, + this.uploadOptions() + ) + ); $upload.on("fileuploaddrop", (e, data) => { - if (data.files.length > 10) { + if (data.files.length > maxFiles) { bootbox.alert(I18n.t("post.errors.too_many_dragged_and_dropped_files")); return false; } else { @@ -56,7 +70,8 @@ export default Em.Mixin.create({ this.validateUploadedFilesOptions() ); const isValid = validateUploadedFiles(data.files, opts); - let form = { type: this.get("type") }; + const type = this.get("type"); + let form = type ? { type } : {}; if (this.get("data")) { form = $.extend(form, this.get("data")); } diff --git a/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs b/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs index b43cbb0d567..f6b2102c92a 100644 --- a/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs +++ b/app/assets/javascripts/discourse/templates/components/avatar-uploader.hbs @@ -1,6 +1,6 @@ {{#if uploading}} {{i18n 'upload_selector.uploading'}} {{uploadProgress}}% diff --git a/app/assets/javascripts/discourse/templates/components/backup-uploader.hbs b/app/assets/javascripts/discourse/templates/components/backup-uploader.hbs new file mode 100644 index 00000000000..affa124962b --- /dev/null +++ b/app/assets/javascripts/discourse/templates/components/backup-uploader.hbs @@ -0,0 +1,4 @@ + diff --git a/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs b/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs index 859433546c3..ad3120827f5 100644 --- a/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs +++ b/app/assets/javascripts/discourse/templates/components/csv-uploader.hbs @@ -1,6 +1,6 @@ {{#if uploading}} {{i18n 'upload_selector.uploading'}} {{uploadProgress}}% diff --git a/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs b/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs index 85ad4157bb3..18ddcaef5a6 100644 --- a/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs +++ b/app/assets/javascripts/discourse/templates/components/emoji-uploader.hbs @@ -2,5 +2,5 @@ diff --git a/app/assets/javascripts/discourse/templates/components/image-uploader.hbs b/app/assets/javascripts/discourse/templates/components/image-uploader.hbs index 5a12cff39bc..8eb03a01457 100644 --- a/app/assets/javascripts/discourse/templates/components/image-uploader.hbs +++ b/app/assets/javascripts/discourse/templates/components/image-uploader.hbs @@ -2,7 +2,7 @@
{{#if backgroundStyle}} diff --git a/app/assets/javascripts/discourse/templates/components/images-uploader.hbs b/app/assets/javascripts/discourse/templates/components/images-uploader.hbs index a8a3f7a3812..6c53367c14b 100644 --- a/app/assets/javascripts/discourse/templates/components/images-uploader.hbs +++ b/app/assets/javascripts/discourse/templates/components/images-uploader.hbs @@ -1,6 +1,6 @@ {{#if uploading}} {{i18n 'upload_selector.uploading'}} {{uploadProgress}}% diff --git a/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs b/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs index 05f1cb29517..1e8fbc61f73 100644 --- a/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs +++ b/app/assets/javascripts/wizard/templates/components/wizard-field-image.hbs @@ -10,5 +10,5 @@ {{d-icon "picture-o"}} {{/if}} - + diff --git a/app/assets/stylesheets/common/base/upload.scss b/app/assets/stylesheets/common/base/upload.scss index dc2a87aa106..ddaf0d1d4ed 100644 --- a/app/assets/stylesheets/common/base/upload.scss +++ b/app/assets/stylesheets/common/base/upload.scss @@ -14,3 +14,8 @@ background-size: contain; } } + +.hidden-upload-field { + visibility: hidden; + position: absolute; +} diff --git a/app/assets/stylesheets/wizard.scss b/app/assets/stylesheets/wizard.scss index 7b4526453a5..a35b7830b18 100644 --- a/app/assets/stylesheets/wizard.scss +++ b/app/assets/stylesheets/wizard.scss @@ -328,6 +328,11 @@ body.wizard { } } + .wizard-hidden-upload-field { + visibility: hidden; + position: absolute; + } + .wizard-step-footer { display: flex; flex-direction: row; diff --git a/app/controllers/admin/backups_controller.rb b/app/controllers/admin/backups_controller.rb index ef8950fd5b5..2b4ffd3512f 100644 --- a/app/controllers/admin/backups_controller.rb +++ b/app/controllers/admin/backups_controller.rb @@ -1,20 +1,26 @@ require "backup_restore/backup_restore" +require "backup_restore/backup_store" class Admin::BackupsController < Admin::AdminController - before_action :ensure_backups_enabled skip_before_action :check_xhr, only: [:index, :show, :logs, :check_backup_chunk, :upload_backup_chunk] def index respond_to do |format| format.html do - store_preloaded("backups", MultiJson.dump(serialize_data(Backup.all, BackupSerializer))) store_preloaded("operations_status", MultiJson.dump(BackupRestore.operations_status)) store_preloaded("logs", MultiJson.dump(BackupRestore.logs)) render "default/empty" end + format.json do - render_serialized(Backup.all, BackupSerializer) + store = BackupRestore::BackupStore.create + + begin + render_serialized(store.files, BackupFileSerializer) + rescue BackupRestore::BackupStore::StorageError => e + render_json_error(e) + end end end end @@ -46,8 +52,11 @@ class Admin::BackupsController < Admin::AdminController end def email - if backup = Backup[params.fetch(:id)] - Jobs.enqueue(:download_backup_email, + store = BackupRestore::BackupStore.create + + if store.file(params.fetch(:id)).present? 
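A note on the pattern the controller now follows: every action resolves a store via `BackupRestore::BackupStore.create`, which returns a `LocalBackupStore` or `S3BackupStore` depending on the `backup_location` site setting. A minimal usage sketch (the filename is a hypothetical example, not from the patch):

```ruby
store = BackupRestore::BackupStore.create

# file() returns a BackupFile or nil; the download source is only
# populated when explicitly requested.
if backup = store.file("example-2018-10-15.tar.gz", include_download_source: true)
  if store.remote?
    # S3 store: source is a pre-signed, short-lived download URL.
    redirect_url = backup.source
  else
    # Local store: source is a path under public/backups/<db>.
    local_path = backup.source
  end
end
```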
+ Jobs.enqueue( + :download_backup_email, user_id: current_user.id, backup_file_path: url_for(controller: 'backups', action: 'show') ) @@ -59,28 +68,34 @@ class Admin::BackupsController < Admin::AdminController end def show - if !EmailBackupToken.compare(current_user.id, params.fetch(:token)) @error = I18n.t('download_backup_mailer.no_token') + return render template: 'admin/backups/show.html.erb', layout: 'no_ember', status: 422 end - if !@error && backup = Backup[params.fetch(:id)] + + store = BackupRestore::BackupStore.create + + if backup = store.file(params.fetch(:id), include_download_source: true) EmailBackupToken.del(current_user.id) StaffActionLogger.new(current_user).log_backup_download(backup) - headers['Content-Length'] = File.size(backup.path).to_s - send_file backup.path - else - if @error - render template: 'admin/backups/show.html.erb', layout: 'no_ember', status: 422 + + if store.remote? + redirect_to backup.source else - render body: nil, status: 404 + headers['Content-Length'] = File.size(backup.source).to_s + send_file backup.source end + else + render body: nil, status: 404 end end def destroy - if backup = Backup[params.fetch(:id)] + store = BackupRestore::BackupStore.create + + if backup = store.file(params.fetch(:id)) StaffActionLogger.new(current_user).log_backup_destroy(backup) - backup.remove + store.delete_file(backup.filename) render body: nil else render body: nil, status: 404 @@ -131,13 +146,13 @@ class Admin::BackupsController < Admin::AdminController end def check_backup_chunk - identifier = params.fetch(:resumableIdentifier) - filename = params.fetch(:resumableFilename) - chunk_number = params.fetch(:resumableChunkNumber) + identifier = params.fetch(:resumableIdentifier) + filename = params.fetch(:resumableFilename) + chunk_number = params.fetch(:resumableChunkNumber) current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i # path to chunk file - chunk = Backup.chunk_path(identifier, filename, chunk_number) + chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number) # check chunk upload status status = HandleChunkUpload.check_chunk(chunk, current_chunk_size: current_chunk_size) @@ -145,21 +160,21 @@ class Admin::BackupsController < Admin::AdminController end def upload_backup_chunk - filename = params.fetch(:resumableFilename) + filename = params.fetch(:resumableFilename) total_size = params.fetch(:resumableTotalSize).to_i - return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless /\.(tar\.gz|t?gz)$/i =~ filename - return render status: 415, plain: I18n.t("backup.not_enough_space_on_disk") unless has_enough_space_on_disk?(total_size) - return render status: 415, plain: I18n.t("backup.invalid_filename") unless !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename) + return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename) + return render status: 415, plain: I18n.t("backup.not_enough_space_on_disk") unless has_enough_space_on_disk?(total_size) + return render status: 415, plain: I18n.t("backup.invalid_filename") unless valid_filename?(filename) - file = params.fetch(:file) - identifier = params.fetch(:resumableIdentifier) - chunk_number = params.fetch(:resumableChunkNumber).to_i - chunk_size = params.fetch(:resumableChunkSize).to_i + file = params.fetch(:file) + identifier = params.fetch(:resumableIdentifier) + chunk_number = params.fetch(:resumableChunkNumber).to_i + chunk_size = params.fetch(:resumableChunkSize).to_i current_chunk_size = 
params.fetch(:resumableCurrentChunkSize).to_i # path to chunk file - chunk = Backup.chunk_path(identifier, filename, chunk_number) + chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number) # upload chunk HandleChunkUpload.upload_chunk(chunk, file: file) @@ -173,6 +188,24 @@ class Admin::BackupsController < Admin::AdminController render body: nil end + def create_upload_url + params.require(:filename) + filename = params.fetch(:filename) + + return render_error("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename) + return render_error("backup.invalid_filename") unless valid_filename?(filename) + + store = BackupRestore::BackupStore.create + + begin + upload_url = store.generate_upload_url(filename) + rescue BackupRestore::BackupStore::BackupFileExists + return render_error("backup.file_exists") + end + + render json: success_json.merge(url: upload_url) + end + private def has_enough_space_on_disk?(size) @@ -183,4 +216,15 @@ class Admin::BackupsController < Admin::AdminController raise Discourse::InvalidAccess.new unless SiteSetting.enable_backups? end + def valid_extension?(filename) + /\.(tar\.gz|t?gz)$/i =~ filename + end + + def valid_filename?(filename) + !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename) + end + + def render_error(message_key) + render json: failed_json.merge(message: I18n.t(message_key)) + end end diff --git a/app/controllers/admin/dashboard_next_controller.rb b/app/controllers/admin/dashboard_next_controller.rb index aaeb001a157..8da41c075db 100644 --- a/app/controllers/admin/dashboard_next_controller.rb +++ b/app/controllers/admin/dashboard_next_controller.rb @@ -27,8 +27,12 @@ class Admin::DashboardNextController < Admin::AdminController private def last_backup_taken_at - if last_backup = Backup.all.first - File.ctime(last_backup.path).utc + store = BackupRestore::BackupStore.create + + begin + store.latest_file&.last_modified + rescue BackupRestore::BackupStore::StorageError + nil end end end diff --git a/app/jobs/regular/backup_chunks_merger.rb b/app/jobs/regular/backup_chunks_merger.rb index 23cda4d2640..0fe070b801b 100644 --- a/app/jobs/regular/backup_chunks_merger.rb +++ b/app/jobs/regular/backup_chunks_merger.rb @@ -1,3 +1,6 @@ +require_dependency "backup_restore/local_backup_store" +require_dependency "backup_restore/backup_store" + module Jobs class BackupChunksMerger < Jobs::Base @@ -12,16 +15,24 @@ module Jobs raise Discourse::InvalidParameters.new(:identifier) if identifier.blank? 
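For the `upload_url` route wired up earlier: when the store is remote, `create_upload_url` above asks the store for a pre-signed URL and the browser PUTs the archive straight to S3. A rough sketch of the happy path, assuming a valid `.tar.gz` filename:

```ruby
store = BackupRestore::BackupStore.create

begin
  # Raises BackupStore::BackupFileExists if an object with this key is
  # already in the bucket; otherwise returns a URL signed for PUT that
  # expires after UPLOAD_URL_EXPIRES_AFTER_SECONDS (6 hours).
  url = store.generate_upload_url("example.tar.gz") # hypothetical name
rescue BackupRestore::BackupStore::BackupFileExists
  url = nil # the controller responds with the "backup.file_exists" error
end
```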
raise Discourse::InvalidParameters.new(:chunks) if chunks <= 0 - backup_path = "#{Backup.base_directory}/#{filename}" + backup_path = "#{BackupRestore::LocalBackupStore.base_directory}/#{filename}" tmp_backup_path = "#{backup_path}.tmp" # path to tmp directory - tmp_directory = File.dirname(Backup.chunk_path(identifier, filename, 0)) + tmp_directory = File.dirname(BackupRestore::LocalBackupStore.chunk_path(identifier, filename, 0)) # merge all chunks - HandleChunkUpload.merge_chunks(chunks, upload_path: backup_path, tmp_upload_path: tmp_backup_path, model: Backup, identifier: identifier, filename: filename, tmp_directory: tmp_directory) + HandleChunkUpload.merge_chunks( + chunks, + upload_path: backup_path, + tmp_upload_path: tmp_backup_path, + identifier: identifier, + filename: filename, + tmp_directory: tmp_directory + ) # push an updated list to the clients - data = ActiveModel::ArraySerializer.new(Backup.all, each_serializer: BackupSerializer).as_json + store = BackupRestore::BackupStore.create + data = ActiveModel::ArraySerializer.new(store.files, each_serializer: BackupFileSerializer).as_json MessageBus.publish("/admin/backups", data, user_ids: User.staff.pluck(:id)) end diff --git a/app/jobs/scheduled/schedule_backup.rb b/app/jobs/scheduled/schedule_backup.rb index feca26cb658..b2e9129811a 100644 --- a/app/jobs/scheduled/schedule_backup.rb +++ b/app/jobs/scheduled/schedule_backup.rb @@ -7,8 +7,9 @@ module Jobs def execute(args) return unless SiteSetting.enable_backups? && SiteSetting.automatic_backups_enabled? - if latest_backup = Backup.all[0] - date = File.ctime(latest_backup.path).getutc.to_date + store = BackupRestore::BackupStore.create + if latest_backup = store.latest_file + date = latest_backup.last_modified.to_date return if (date + SiteSetting.backup_frequency.days) > Time.now.utc.to_date end diff --git a/app/models/admin_dashboard_data.rb b/app/models/admin_dashboard_data.rb index 8295934a729..33d2c87858d 100644 --- a/app/models/admin_dashboard_data.rb +++ b/app/models/admin_dashboard_data.rb @@ -209,7 +209,7 @@ class AdminDashboardData bad_keys = (SiteSetting.s3_access_key_id.blank? || SiteSetting.s3_secret_access_key.blank?) && !SiteSetting.s3_use_iam_profile return I18n.t('dashboard.s3_config_warning') if SiteSetting.enable_s3_uploads && (bad_keys || SiteSetting.s3_upload_bucket.blank?) - return I18n.t('dashboard.s3_backup_config_warning') if SiteSetting.enable_s3_backups && (bad_keys || SiteSetting.s3_backup_bucket.blank?) + return I18n.t('dashboard.s3_backup_config_warning') if SiteSetting.backup_location == BackupLocationSiteSetting::S3 && (bad_keys || SiteSetting.s3_backup_bucket.blank?) 
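The scheduler change above swaps `File.ctime` on a local path for the store's `latest_file.last_modified`, so the frequency check works for S3-hosted backups too. Condensed into a predicate, the due-date logic is:

```ruby
def backup_due?(store)
  latest_backup = store.latest_file
  return true if latest_backup.nil?

  # Due once backup_frequency days have elapsed since the newest file.
  date = latest_backup.last_modified.to_date
  (date + SiteSetting.backup_frequency.days) <= Time.now.utc.to_date
end
```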
end nil end diff --git a/app/models/backup.rb b/app/models/backup.rb deleted file mode 100644 index 8dc6ae2e739..00000000000 --- a/app/models/backup.rb +++ /dev/null @@ -1,92 +0,0 @@ -require 'disk_space' - -class Backup - include ActiveModel::SerializerSupport - - attr_reader :filename - attr_accessor :size, :path, :link - - def initialize(filename) - @filename = filename - end - - def self.all - Dir.glob(File.join(Backup.base_directory, "*.{gz,tgz}")) - .sort_by { |file| File.mtime(file) } - .reverse - .map { |backup| Backup.create_from_filename(File.basename(backup)) } - end - - def self.[](filename) - path = File.join(Backup.base_directory, filename) - if File.exists?(path) - Backup.create_from_filename(filename) - else - nil - end - end - - def remove - File.delete(@path) if File.exists?(path) - after_remove_hook - end - - def after_create_hook - upload_to_s3 if SiteSetting.enable_s3_backups? - DiscourseEvent.trigger(:backup_created) - end - - def after_remove_hook - remove_from_s3 if SiteSetting.enable_s3_backups? && !SiteSetting.s3_disable_cleanup? - DiskSpace.reset_cached_stats unless SiteSetting.enable_s3_backups? - end - - def s3_bucket - return @s3_bucket if @s3_bucket - raise Discourse::SiteSettingMissing.new("s3_backup_bucket") if SiteSetting.s3_backup_bucket.blank? - @s3_bucket = SiteSetting.s3_backup_bucket.downcase - end - - def s3 - require "s3_helper" unless defined? S3Helper - @s3_helper ||= S3Helper.new(s3_bucket, '', S3Helper.s3_options(SiteSetting)) - end - - def upload_to_s3 - return unless s3 - File.open(@path) do |file| - s3.upload(file, @filename) - end - end - - def remove_from_s3 - return unless s3 - s3.remove(@filename) - end - - def self.base_directory - base_directory = File.join(Rails.root, "public", "backups", RailsMultisite::ConnectionManagement.current_db) - FileUtils.mkdir_p(base_directory) unless Dir.exists?(base_directory) - base_directory - end - - def self.chunk_path(identifier, filename, chunk_number) - File.join(Backup.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}") - end - - def self.create_from_filename(filename) - Backup.new(filename).tap do |b| - b.path = File.join(Backup.base_directory, b.filename) - b.link = UrlHelper.schemaless "#{Discourse.base_url}/admin/backups/#{b.filename}" - b.size = File.size(b.path) - end - end - - def self.remove_old - return if Rails.env.development? 
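The retention logic being deleted here (`Backup.remove_old`) is not lost: it reappears as `BackupStore#delete_old` later in this patch, shared by both stores (the real method also consults a `cleanup_allowed?` hook, which the S3 store uses to honor `s3_disable_cleanup`). The shared algorithm keeps the newest `maximum_backups` files:

```ruby
def delete_old(store)
  # files() is sorted by last_modified, newest first.
  backup_files = store.files
  return if backup_files.size <= SiteSetting.maximum_backups

  backup_files[SiteSetting.maximum_backups..-1].each do |file|
    store.delete_file(file.filename)
  end
end
```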
- all_backups = Backup.all - return if all_backups.size <= SiteSetting.maximum_backups - all_backups[SiteSetting.maximum_backups..-1].each(&:remove) - end - -end diff --git a/app/models/backup_file.rb b/app/models/backup_file.rb new file mode 100644 index 00000000000..86482f687d5 --- /dev/null +++ b/app/models/backup_file.rb @@ -0,0 +1,25 @@ +class BackupFile + include ActiveModel::SerializerSupport + + attr_reader :filename, + :size, + :last_modified, + :source + + def initialize(filename:, size:, last_modified:, source: nil) + @filename = filename + @size = size + @last_modified = last_modified + @source = source + end + + def ==(other) + attributes == other.attributes + end + + protected + + def attributes + [@filename, @size, @last_modified, @source] + end +end diff --git a/app/models/backup_location_site_setting.rb b/app/models/backup_location_site_setting.rb new file mode 100644 index 00000000000..aef3c89fec3 --- /dev/null +++ b/app/models/backup_location_site_setting.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require_dependency 'enum_site_setting'; + +class BackupLocationSiteSetting < EnumSiteSetting + LOCAL ||= "local" + S3 ||= "s3" + + def self.valid_value?(val) + values.any? { |v| v[:value] == val } + end + + def self.values + @values ||= [ + { name: "admin.backups.location.local", value: LOCAL }, + { name: "admin.backups.location.s3", value: S3 } + ] + end + + def self.translate_names? + true + end +end diff --git a/app/serializers/backup_file_serializer.rb b/app/serializers/backup_file_serializer.rb new file mode 100644 index 00000000000..f4cada57082 --- /dev/null +++ b/app/serializers/backup_file_serializer.rb @@ -0,0 +1,5 @@ +class BackupFileSerializer < ApplicationSerializer + attributes :filename, + :size, + :last_modified +end diff --git a/app/serializers/backup_serializer.rb b/app/serializers/backup_serializer.rb deleted file mode 100644 index 3d4356b09f4..00000000000 --- a/app/serializers/backup_serializer.rb +++ /dev/null @@ -1,3 +0,0 @@ -class BackupSerializer < ApplicationSerializer - attributes :filename, :size, :link -end diff --git a/app/services/handle_chunk_upload.rb b/app/services/handle_chunk_upload.rb index 75362e54e59..93aee6122fc 100644 --- a/app/services/handle_chunk_upload.rb +++ b/app/services/handle_chunk_upload.rb @@ -36,7 +36,6 @@ class HandleChunkUpload def merge_chunks upload_path = @params[:upload_path] tmp_upload_path = @params[:tmp_upload_path] - model = @params[:model] identifier = @params[:identifier] filename = @params[:filename] tmp_directory = @params[:tmp_directory] @@ -52,7 +51,7 @@ class HandleChunkUpload File.open(tmp_upload_path, "a") do |file| (1..@chunk).each do |chunk_number| # path to chunk - chunk_path = model.chunk_path(identifier, filename, chunk_number) + chunk_path = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number) # add chunk to file file << File.open(chunk_path).read end diff --git a/config/locales/client.en.yml b/config/locales/client.en.yml index e15b2eb44ae..157b35bb0be 100644 --- a/config/locales/client.en.yml +++ b/config/locales/client.en.yml @@ -3134,6 +3134,7 @@ en: label: "Upload" title: "Upload a backup to this instance" uploading: "Uploading..." + uploading_progress: "Uploading... {{progress}}%" success: "'{{filename}}' has successfully been uploaded. The file is now being processed and will take up to a minute to show up in the list." 
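Since `BackupFile` above is a plain value object rather than an ActiveRecord model, equality is defined over its attribute array, which the store specs later in this patch depend on. For illustration, with made-up values:

```ruby
a = BackupFile.new(filename: "a.tar.gz", size: 17, last_modified: Time.utc(2018, 2, 11))
b = BackupFile.new(filename: "a.tar.gz", size: 17, last_modified: Time.utc(2018, 2, 11))

a == b      # => true; compares [filename, size, last_modified, source]
a.source    # => nil unless include_download_source was requested from the store
```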
error: "There has been an error while uploading '{{filename}}': {{message}}" operations: @@ -3164,6 +3165,9 @@ en: label: "Rollback" title: "Rollback the database to previous working state" confirm: "Are you sure you want to rollback the database to the previous working state?" + location: + local: "Local" + s3: "Amazon S3" export_csv: success: "Export initiated, you will be notified via message when the process is complete." diff --git a/config/locales/server.en.yml b/config/locales/server.en.yml index f450f85e75a..6a5e45265a3 100644 --- a/config/locales/server.en.yml +++ b/config/locales/server.en.yml @@ -166,6 +166,7 @@ en: other: 'You specified the invalid choices %{name}' default_categories_already_selected: "You cannot select a category used in another list." s3_upload_bucket_is_required: "You cannot enable uploads to S3 unless you've provided the 's3_upload_bucket'." + s3_backup_requires_s3_settings: "You cannot use S3 as backup location unless you've provided the '%{setting_name}'." conflicting_google_user_id: 'The Google Account ID for this account has changed; staff intervention is required for security reasons. Please contact staff and point them to
https://meta.discourse.org/t/76575' activemodel: @@ -194,6 +195,10 @@ en: backup_file_should_be_tar_gz: "The backup file should be a .tar.gz archive." not_enough_space_on_disk: "There is not enough space on disk to upload this backup." invalid_filename: "The backup filename contains invalid characters. Valid characters are a-z 0-9 . - _." + file_exists: "The file you are trying to upload already exists." + location: + local: "Local" + s3: "Amazon S3" invalid_params: "You supplied invalid parameters to the request: %{message}" not_logged_in: "You need to be logged in to do that." @@ -1359,7 +1364,6 @@ en: maximum_backups: "The maximum amount of backups to keep on disk. Older backups are automatically deleted" automatic_backups_enabled: "Run automatic backups as defined in backup frequency" backup_frequency: "The number of days between backups." - enable_s3_backups: "Upload backups to S3 when complete. IMPORTANT: requires valid S3 credentials entered in Files settings." s3_backup_bucket: "The remote bucket to hold backups. WARNING: Make sure it is a private bucket." s3_endpoint: "The endpoint can be modified to backup to an S3 compatible service like DigitalOcean Spaces or Minio. WARNING: Use default if using AWS S3" s3_force_path_style: "Enforce path-style addressing for your custom endpoint. IMPORTANT: Required for using Minio uploads and backups." @@ -1367,6 +1371,7 @@ en: s3_disable_cleanup: "Disable the removal of backups from S3 when removed locally." backup_time_of_day: "Time of day UTC when the backup should occur." backup_with_uploads: "Include uploads in scheduled backups. Disabling this will only backup the database." + backup_location: "Location where backups are stored. IMPORTANT: S3 requires valid S3 credentials entered in Files settings." active_user_rate_limit_secs: "How frequently we update the 'last_seen_at' field, in seconds" verbose_localization: "Show extended localization tips in the UI" diff --git a/config/routes.rb b/config/routes.rb index 11a6106c46f..1d6caab2b07 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -280,6 +280,7 @@ Discourse::Application.routes.draw do put "readonly" => "backups#readonly" get "upload" => "backups#check_backup_chunk" post "upload" => "backups#upload_backup_chunk" + get "upload_url" => "backups#create_upload_url" end end diff --git a/config/site_settings.yml b/config/site_settings.yml index 725f736d5cc..1a882a9c1ef 100644 --- a/config/site_settings.yml +++ b/config/site_settings.yml @@ -1405,6 +1405,12 @@ backups: allow_restore: default: false shadowed_by_global: true + backup_location: + default: 'local' + type: enum + enum: 'BackupLocationSiteSetting' + shadowed_by_global: true + client: true maximum_backups: client: true default: 5 @@ -1417,9 +1423,6 @@ backups: max: 30 default: 7 shadowed_by_global: true - enable_s3_backups: - default: false - shadowed_by_global: true s3_backup_bucket: default: '' regex: '^[a-z0-9\-\/]+$' # can't use '.' 
when using HTTPS diff --git a/db/migrate/20180916195601_migrate_s3_backup_site_settings.rb b/db/migrate/20180916195601_migrate_s3_backup_site_settings.rb new file mode 100644 index 00000000000..064a4046276 --- /dev/null +++ b/db/migrate/20180916195601_migrate_s3_backup_site_settings.rb @@ -0,0 +1,33 @@ +class MigrateS3BackupSiteSettings < ActiveRecord::Migration[5.2] + def up + execute <<~SQL + UPDATE site_settings + SET name = 'backup_location', + data_type = 7, + value = 's3' + WHERE name = 'enable_s3_backups' AND value = 't'; + SQL + + execute <<~SQL + DELETE + FROM site_settings + WHERE name = 'enable_s3_backups'; + SQL + end + + def down + execute <<~SQL + UPDATE site_settings + SET name = 'enable_s3_backups', + data_type = 5, + value = 't' + WHERE name = 'backup_location' AND value = 's3'; + SQL + + execute <<~SQL + DELETE + FROM site_settings + WHERE name = 'backup_location'; + SQL + end +end diff --git a/lib/backup_restore/backup_restore.rb b/lib/backup_restore/backup_restore.rb index 0591b31fe11..8e69ed997a5 100644 --- a/lib/backup_restore/backup_restore.rb +++ b/lib/backup_restore/backup_restore.rb @@ -1,5 +1,5 @@ -require "backup_restore/backuper" -require "backup_restore/restorer" +require_dependency "backup_restore/backuper" +require_dependency "backup_restore/restorer" module BackupRestore diff --git a/lib/backup_restore/backup_store.rb b/lib/backup_restore/backup_store.rb new file mode 100644 index 00000000000..3ca9d705292 --- /dev/null +++ b/lib/backup_restore/backup_store.rb @@ -0,0 +1,74 @@ +module BackupRestore + # @abstract + class BackupStore + class BackupFileExists < RuntimeError; end + class StorageError < RuntimeError; end + + # @return [BackupStore] + def self.create(opts = {}) + case SiteSetting.backup_location + when BackupLocationSiteSetting::LOCAL + require_dependency "backup_restore/local_backup_store" + BackupRestore::LocalBackupStore.new(opts) + when BackupLocationSiteSetting::S3 + require_dependency "backup_restore/s3_backup_store" + BackupRestore::S3BackupStore.new(opts) + end + end + + # @return [Array] + def files + unsorted_files.sort_by { |file| -file.last_modified.to_i } + end + + # @return [BackupFile] + def latest_file + files.first + end + + def delete_old + return unless cleanup_allowed? + return if (backup_files = files).size <= SiteSetting.maximum_backups + + backup_files[SiteSetting.maximum_backups..-1].each do |file| + delete_file(file.filename) + end + end + + def remote? + fail NotImplementedError + end + + # @return [BackupFile] + def file(filename, include_download_source: false) + fail NotImplementedError + end + + def delete_file(filename) + fail NotImplementedError + end + + def download_file(filename, destination, failure_message = nil) + fail NotImplementedError + end + + def upload_file(filename, source_path, content_type) + fail NotImplementedError + end + + def generate_upload_url(filename) + fail NotImplementedError + end + + private + + # @return [Array] + def unsorted_files + fail NotImplementedError + end + + def cleanup_allowed? + true + end + end +end diff --git a/lib/backup_restore/backuper.rb b/lib/backup_restore/backuper.rb index e7fdc04ed6d..f03d3dd50f3 100644 --- a/lib/backup_restore/backuper.rb +++ b/lib/backup_restore/backuper.rb @@ -1,4 +1,5 @@ -require 'disk_space' +require "disk_space" +require "mini_mime" module BackupRestore @@ -10,6 +11,7 @@ module BackupRestore @client_id = opts[:client_id] @publish_to_message_bus = opts[:publish_to_message_bus] || false @with_uploads = opts[:with_uploads].nil? ? 
true : opts[:with_uploads] + @filename_override = opts[:filename] ensure_no_operation_is_running ensure_we_have_a_user @@ -42,6 +44,7 @@ module BackupRestore log "Finalizing backup..." @with_uploads ? create_archive : move_dump_backup + upload_archive after_create_hook rescue SystemExit @@ -52,9 +55,9 @@ module BackupRestore @success = false else @success = true - File.join(@archive_directory, @backup_filename) + @backup_filename ensure - remove_old + delete_old clean_up notify_user log "Finished!" @@ -75,12 +78,14 @@ module BackupRestore def initialize_state @success = false + @store = BackupRestore::BackupStore.create @current_db = RailsMultisite::ConnectionManagement.current_db @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S") @tmp_directory = File.join(Rails.root, "tmp", "backups", @current_db, @timestamp) @dump_filename = File.join(@tmp_directory, BackupRestore::DUMP_FILE) - @archive_directory = File.join(Rails.root, "public", "backups", @current_db) - @archive_basename = File.join(@archive_directory, "#{SiteSetting.title.parameterize}-#{@timestamp}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}") + @archive_directory = BackupRestore::LocalBackupStore.base_directory(@current_db) + filename = @filename_override || "#{SiteSetting.title.parameterize}-#{@timestamp}" + @archive_basename = File.join(@archive_directory, "#{filename}-#{BackupRestore::VERSION_PREFIX}#{BackupRestore.current_version}") @backup_filename = if @with_uploads @@ -195,8 +200,10 @@ module BackupRestore def move_dump_backup log "Finalizing database dump file: #{@backup_filename}" + archive_filename = File.join(@archive_directory, @backup_filename) + Discourse::Utils.execute_command( - 'mv', @dump_filename, File.join(@archive_directory, @backup_filename), + 'mv', @dump_filename, archive_filename, failure_message: "Failed to move database dump file." ) @@ -243,17 +250,30 @@ module BackupRestore Discourse::Utils.execute_command('gzip', '-5', tar_filename, failure_message: "Failed to gzip archive.") end - def after_create_hook - log "Executing the after_create_hook for the backup..." - backup = Backup.create_from_filename(@backup_filename) - backup.after_create_hook + def upload_archive + return unless @store.remote? + + log "Uploading archive..." + content_type = MiniMime.lookup_by_filename(@backup_filename).content_type + archive_path = File.join(@archive_directory, @backup_filename) + @store.upload_file(@backup_filename, archive_path, content_type) + ensure + log "Removing archive from local storage..." + FileUtils.remove_file(archive_path, force: true) end - def remove_old - log "Removing old backups..." - Backup.remove_old + def after_create_hook + log "Executing the after_create_hook for the backup..." + DiscourseEvent.trigger(:backup_created) + end + + def delete_old + return if Rails.env.development? + + log "Deleting old backups..." 
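One subtlety in `upload_archive` above: the `ensure` block deletes the local archive even when `upload_file` raises, so a failed S3 upload does not leave the file on disk. A sketch of that shape (the nil-guard in the `ensure` is my addition, not part of the patch):

```ruby
def upload_archive
  return unless @store.remote?

  archive_path = File.join(@archive_directory, @backup_filename)
  content_type = MiniMime.lookup_by_filename(@backup_filename).content_type
  @store.upload_file(@backup_filename, archive_path, content_type)
ensure
  # Runs on success, on raise, and on the early return above; the guard
  # (not in the original patch) avoids passing nil to remove_file.
  FileUtils.remove_file(archive_path, force: true) if archive_path
end
```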
+ @store.delete_old rescue => ex - log "Something went wrong while removing old backups.", ex + log "Something went wrong while deleting old backups.", ex end def notify_user diff --git a/lib/backup_restore/local_backup_store.rb b/lib/backup_restore/local_backup_store.rb new file mode 100644 index 00000000000..8b0148ee3c3 --- /dev/null +++ b/lib/backup_restore/local_backup_store.rb @@ -0,0 +1,65 @@ +require_dependency "backup_restore/backup_store" +require_dependency "disk_space" + +module BackupRestore + class LocalBackupStore < BackupStore + def self.base_directory(current_db = nil) + current_db ||= RailsMultisite::ConnectionManagement.current_db + base_directory = File.join(Rails.root, "public", "backups", current_db) + FileUtils.mkdir_p(base_directory) unless Dir.exists?(base_directory) + base_directory + end + + def self.chunk_path(identifier, filename, chunk_number) + File.join(LocalBackupStore.base_directory, "tmp", identifier, "#{filename}.part#{chunk_number}") + end + + def initialize(opts = {}) + @base_directory = opts[:base_directory] || LocalBackupStore.base_directory + end + + def remote? + false + end + + def file(filename, include_download_source: false) + path = path_from_filename(filename) + create_file_from_path(path, include_download_source) if File.exists?(path) + end + + def delete_file(filename) + path = path_from_filename(filename) + + if File.exists?(path) + FileUtils.remove_file(path, force: true) + DiskSpace.reset_cached_stats + end + end + + def download_file(filename, destination, failure_message = "") + path = path_from_filename(filename) + Discourse::Utils.execute_command('cp', path, destination, failure_message: failure_message) + end + + private + + def unsorted_files + files = Dir.glob(File.join(@base_directory, "*.{gz,tgz}")) + files.map! { |filename| create_file_from_path(filename) } + files + end + + def path_from_filename(filename) + File.join(@base_directory, filename) + end + + def create_file_from_path(path, include_download_source = false) + BackupFile.new( + filename: File.basename(path), + size: File.size(path), + last_modified: File.mtime(path).utc, + source: include_download_source ? path : nil + ) + end + end +end diff --git a/lib/backup_restore/restorer.rb b/lib/backup_restore/restorer.rb index e8a98f0581c..62cfd24b672 100644 --- a/lib/backup_restore/restorer.rb +++ b/lib/backup_restore/restorer.rb @@ -133,12 +133,12 @@ module BackupRestore def initialize_state @success = false + @store = BackupRestore::BackupStore.create @db_was_changed = false @current_db = RailsMultisite::ConnectionManagement.current_db @current_version = BackupRestore.current_version @timestamp = Time.now.strftime("%Y-%m-%d-%H%M%S") @tmp_directory = File.join(Rails.root, "tmp", "restores", @current_db, @timestamp) - @source_filename = File.join(Backup.base_directory, @filename) @archive_filename = File.join(@tmp_directory, @filename) @tar_filename = @archive_filename[0...-3] @meta_filename = File.join(@tmp_directory, BackupRestore::METADATA_FILE) @@ -195,8 +195,15 @@ module BackupRestore end def copy_archive_to_tmp_directory - log "Copying archive to tmp directory..." - Discourse::Utils.execute_command('cp', @source_filename, @archive_filename, failure_message: "Failed to copy archive to tmp directory.") + if @store.remote? + log "Downloading archive to tmp directory..." + failure_message = "Failed to download archive to tmp directory." + else + log "Copying archive to tmp directory..." + failure_message = "Failed to copy archive to tmp directory." 
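As the next few lines show, both branches of `copy_archive_to_tmp_directory` funnel into the same `@store.download_file` call; only the log line and failure message differ. The polymorphic call looks like this (filenames are illustrative):

```ruby
store = BackupRestore::BackupStore.create

# Local store: shells out to `cp`. S3 store: Aws::S3::Object#download_file,
# raising the supplied failure message if the download does not succeed.
store.download_file(
  "example-2018-10-15.tar.gz",
  "/tmp/restores/default/example-2018-10-15.tar.gz",
  "Failed to copy archive to tmp directory."
)
```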
+ end + + @store.download_file(@filename, @archive_filename, failure_message) end def unzip_archive diff --git a/lib/backup_restore/s3_backup_store.rb b/lib/backup_restore/s3_backup_store.rb new file mode 100644 index 00000000000..cbbc916df08 --- /dev/null +++ b/lib/backup_restore/s3_backup_store.rb @@ -0,0 +1,95 @@ +require_dependency "backup_restore/backup_store" +require_dependency "s3_helper" + +module BackupRestore + class S3BackupStore < BackupStore + DOWNLOAD_URL_EXPIRES_AFTER_SECONDS ||= 15 + UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600 # 6 hours + + def initialize(opts = {}) + s3_options = S3Helper.s3_options(SiteSetting) + s3_options.merge!(opts[:s3_options]) if opts[:s3_options] + @s3_helper = S3Helper.new(SiteSetting.s3_backup_bucket, '', s3_options) + end + + def remote? + true + end + + def file(filename, include_download_source: false) + obj = @s3_helper.object(filename) + create_file_from_object(obj, include_download_source) if obj.exists? + end + + def delete_file(filename) + obj = @s3_helper.object(filename) + obj.delete if obj.exists? + end + + def download_file(filename, destination_path, failure_message = nil) + unless @s3_helper.object(filename).download_file(destination_path) + raise failure_message&.to_s || "Failed to download file" + end + end + + def upload_file(filename, source_path, content_type) + obj = @s3_helper.object(filename) + raise BackupFileExists.new if obj.exists? + + obj.upload_file(source_path, content_type: content_type) + end + + def generate_upload_url(filename) + obj = @s3_helper.object(filename) + raise BackupFileExists.new if obj.exists? + + presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS) + end + + private + + def unsorted_files + objects = [] + + @s3_helper.list.each do |obj| + if obj.key.match?(/\.t?gz$/i) + objects << create_file_from_object(obj) + end + end + + objects + rescue Aws::Errors::ServiceError => e + Rails.logger.warn("Failed to list backups from S3: #{e.message.presence || e.class.name}") + raise StorageError + end + + def create_file_from_object(obj, include_download_source = false) + BackupFile.new( + filename: File.basename(obj.key), + size: obj.size, + last_modified: obj.last_modified, + source: include_download_source ? presigned_url(obj, :get, DOWNLOAD_URL_EXPIRES_AFTER_SECONDS) : nil + ) + end + + def presigned_url(obj, method, expires_in_seconds) + ensure_cors! + obj.presigned_url(method, expires_in: expires_in_seconds) + end + + def ensure_cors! + rule = { + allowed_headers: ["*"], + allowed_methods: ["PUT"], + allowed_origins: [Discourse.base_url_no_prefix], + max_age_seconds: 3000 + } + + @s3_helper.ensure_cors!([rule]) + end + + def cleanup_allowed? 
+ !SiteSetting.s3_disable_cleanup + end + end +end diff --git a/lib/disk_space.rb b/lib/disk_space.rb index 8c5e385cec6..bf9bb0db0f3 100644 --- a/lib/disk_space.rb +++ b/lib/disk_space.rb @@ -2,8 +2,8 @@ class DiskSpace extend ActionView::Helpers::NumberHelper - DISK_SPACE_STATS_CACHE_KEY = 'disk_space_stats'.freeze - DISK_SPACE_STATS_UPDATED_CACHE_KEY = 'disk_space_stats_updated'.freeze + DISK_SPACE_STATS_CACHE_KEY ||= 'disk_space_stats'.freeze + DISK_SPACE_STATS_UPDATED_CACHE_KEY ||= 'disk_space_stats_updated'.freeze def self.uploads_used_bytes # used(uploads_path) @@ -24,7 +24,7 @@ class DiskSpace end def self.backups_path - Backup.base_directory + BackupRestore::LocalBackupStore.base_directory end def self.uploads_path diff --git a/lib/json_error.rb b/lib/json_error.rb index 1675ac562b2..7d763a23085 100644 --- a/lib/json_error.rb +++ b/lib/json_error.rb @@ -24,6 +24,12 @@ module JsonError # If we're passed an array, it's an array of error messages return { errors: obj.map(&:to_s) } if obj.is_a?(Array) && obj.present? + if obj.is_a?(Exception) + message = obj.cause.message.presence || obj.cause.class.name if obj.cause + message = obj.message.presence || obj.class.name if message.blank? + return { errors: [message] } if message.present? + end + # Log a warning (unless obj is nil) Rails.logger.warn("create_errors_json called with unrecognized type: #{obj.inspect}") if obj diff --git a/lib/s3_helper.rb b/lib/s3_helper.rb index a46fdc2d22d..b4bcb445197 100644 --- a/lib/s3_helper.rb +++ b/lib/s3_helper.rb @@ -51,7 +51,7 @@ class S3Helper # make sure we have a cors config for assets # otherwise we will have no fonts - def ensure_cors! + def ensure_cors!(rules = nil) rule = nil begin @@ -63,17 +63,17 @@ class S3Helper end unless rule - puts "installing CORS rule" + rules = [{ + allowed_headers: ["Authorization"], + allowed_methods: ["GET", "HEAD"], + allowed_origins: ["*"], + max_age_seconds: 3000 + }] if rules.nil? s3_resource.client.put_bucket_cors( bucket: @s3_bucket_name, cors_configuration: { - cors_rules: [{ - allowed_headers: ["Authorization"], - allowed_methods: ["GET", "HEAD"], - allowed_origins: ["*"], - max_age_seconds: 3000 - }] + cors_rules: rules } ) end @@ -137,10 +137,7 @@ class S3Helper end def list(prefix = "") - if @s3_bucket_folder_path.present? - prefix = File.join(@s3_bucket_folder_path, prefix) - end - + prefix = get_path_for_s3_upload(prefix) s3_bucket.objects(prefix: prefix) end @@ -159,6 +156,11 @@ class S3Helper ) end + def object(path) + path = get_path_for_s3_upload(path) + s3_bucket.object(path) + end + def self.s3_options(obj) opts = { region: obj.s3_region, endpoint: SiteSetting.s3_endpoint, diff --git a/lib/site_settings/validations.rb b/lib/site_settings/validations.rb index ae38285cc82..0d50f488b3d 100644 --- a/lib/site_settings/validations.rb +++ b/lib/site_settings/validations.rb @@ -1,8 +1,8 @@ module SiteSettings; end module SiteSettings::Validations - def validate_error(key) - raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}")) + def validate_error(key, opts = {}) + raise Discourse::InvalidParameters.new(I18n.t("errors.site_settings.#{key}", opts)) end def validate_default_categories(new_val, default_categories_selected) @@ -53,4 +53,13 @@ module SiteSettings::Validations validate_error :s3_upload_bucket_is_required if new_val == "t" && SiteSetting.s3_upload_bucket.blank? 
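`validate_backup_location`, added just below, is what stops `backup_location` from being switched to S3 with incomplete credentials. Restated as a boolean (a hypothetical helper, not in the patch), the rule it enforces is roughly:

```ruby
def s3_backup_location_valid?
  return false if SiteSetting.s3_backup_bucket.blank?
  return true  if SiteSetting.s3_use_iam_profile

  # Without an IAM profile, explicit credentials are mandatory.
  SiteSetting.s3_access_key_id.present? &&
    SiteSetting.s3_secret_access_key.present?
end
```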
end + def validate_backup_location(new_val) + return unless new_val == BackupLocationSiteSetting::S3 + validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_backup_bucket") if SiteSetting.s3_backup_bucket.blank? + + unless SiteSetting.s3_use_iam_profile + validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_access_key_id") if SiteSetting.s3_access_key_id.blank? + validate_error(:s3_backup_requires_s3_settings, setting_name: "s3_secret_access_key") if SiteSetting.s3_secret_access_key.blank? + end + end end diff --git a/lib/tasks/s3.rake b/lib/tasks/s3.rake index 7b2889ae938..b19da1de3a1 100644 --- a/lib/tasks/s3.rake +++ b/lib/tasks/s3.rake @@ -84,6 +84,8 @@ end task 's3:upload_assets' => :environment do ensure_s3_configured! + + puts "installing CORS rule" helper.ensure_cors! assets.each do |asset| diff --git a/script/discourse b/script/discourse index d0fccded04c..ddfc6181933 100755 --- a/script/discourse +++ b/script/discourse @@ -62,18 +62,20 @@ class DiscourseCLI < Thor require "backup_restore/backuper" puts "Starting backup..." - backuper = BackupRestore::Backuper.new(Discourse.system_user.id) - backup = backuper.run - if filename.present? - puts "Moving '#{backup}' to '#{filename}'" - puts "Including version number into '#{filename}'" - version_string = File.basename(backup)[/-#{BackupRestore::VERSION_PREFIX}\d{14}/] - filename = filename.dup.insert(filename.index('.'), version_string) - FileUtils.mv(backup, filename) - backup = filename - end + backuper = BackupRestore::Backuper.new(Discourse.system_user.id, filename: filename) + backup_filename = backuper.run puts "Backup done." - puts "Output file is in: #{backup}", "" + + store = BackupRestore::BackupStore.create + + if store.remote? + location = BackupLocationSiteSetting.values.find { |v| v[:value] == SiteSetting.backup_location } + location = I18n.t("admin_js.#{location[:name]}") if location + puts "Output file is stored on #{location} as #{backup_filename}", "" + else + backup = store.file(backup_filename, include_download_source: true) + puts "Output file is in: #{backup.source}", "" + end exit(1) unless backuper.success end @@ -92,20 +94,22 @@ class DiscourseCLI < Thor discourse = './script/discourse' end + load_rails + require "backup_restore/backup_restore" + require "backup_restore/restorer" + require "backup_restore/backup_store" + if !filename puts "You must provide a filename to restore. 
Did you mean one of the following?\n\n"
-    Dir["public/backups/default/*"].sort_by { |path| File.mtime(path) }.reverse.each do |f|
-      puts "#{discourse} restore #{File.basename(f)}"
+    store = BackupRestore::BackupStore.create
+    store.files.each do |file|
+      puts "#{discourse} restore #{file.filename}"
     end
     return
   end

-  load_rails
-  require "backup_restore/backup_restore"
-  require "backup_restore/restorer"
-
   begin
     puts "Starting restore: #{filename}"
     restorer = BackupRestore::Restorer.new(Discourse.system_user.id, filename: filename)
diff --git a/spec/lib/backup_restore/local_backup_store_spec.rb b/spec/lib/backup_restore/local_backup_store_spec.rb
new file mode 100644
index 00000000000..efe87583d90
--- /dev/null
+++ b/spec/lib/backup_restore/local_backup_store_spec.rb
@@ -0,0 +1,56 @@
+require 'rails_helper'
+require 'backup_restore/local_backup_store'
+require_relative 'shared_examples_for_backup_store'
+
+describe BackupRestore::LocalBackupStore do
+  before(:all) do
+    @base_directory = Dir.mktmpdir
+    @paths = []
+  end
+
+  after(:all) do
+    FileUtils.remove_dir(@base_directory, true)
+  end
+
+  before do
+    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
+  end
+
+  subject(:store) { BackupRestore::BackupStore.create(base_directory: @base_directory) }
+  let(:expected_type) { BackupRestore::LocalBackupStore }
+
+  it_behaves_like "backup store"
+
+  it "is not a remote store" do
+    expect(store.remote?).to eq(false)
+  end
+
+  def create_backups
+    create_file(filename: "b.tar.gz", last_modified: "2018-09-13T15:10:00Z", size_in_bytes: 17)
+    create_file(filename: "a.tgz", last_modified: "2018-02-11T09:27:00Z", size_in_bytes: 29)
+    create_file(filename: "r.sql.gz", last_modified: "2017-12-20T03:48:00Z", size_in_bytes: 11)
+    create_file(filename: "no-backup.txt", last_modified: "2018-09-05T14:27:00Z", size_in_bytes: 12)
+  end
+
+  def remove_backups
+    @paths.each { |path| File.delete(path) if File.exists?(path) }
+    @paths.clear
+  end
+
+  def create_file(filename:, last_modified:, size_in_bytes:)
+    path = File.join(@base_directory, filename)
+    return if File.exists?(path)
+
+    @paths << path
+    FileUtils.touch(path)
+    File.truncate(path, size_in_bytes)
+
+    time = Time.parse(last_modified)
+    File.utime(time, time, path)
+  end
+
+  def source_regex(filename)
+    path = File.join(@base_directory, filename)
+    /^#{Regexp.escape(path)}$/
+  end
+end
diff --git a/spec/lib/backup_restore/s3_backup_store_spec.rb b/spec/lib/backup_restore/s3_backup_store_spec.rb
new file mode 100644
index 00000000000..33cfa945b00
--- /dev/null
+++ b/spec/lib/backup_restore/s3_backup_store_spec.rb
@@ -0,0 +1,109 @@
+require 'rails_helper'
+require 'backup_restore/s3_backup_store'
+require_relative 'shared_examples_for_backup_store'
+
+describe BackupRestore::S3BackupStore do
+  before(:all) do
+    @s3_client = Aws::S3::Client.new(stub_responses: true)
+    @s3_options = { client: @s3_client }
+
+    @objects = []
+
+    @s3_client.stub_responses(:list_objects, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+      expect(context.params[:prefix]).to be_blank
+
+      { contents: @objects }
+    end)
+
+    @s3_client.stub_responses(:delete_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+      expect do
+        @objects.delete_if { |obj| obj[:key] == context.params[:key] }
+      end.to change { @objects }
+    end)
+
+    @s3_client.stub_responses(:head_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+
+      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
+        { content_length: object[:size], last_modified: object[:last_modified] }
+      else
+        { status_code: 404, headers: {}, body: "", }
+      end
+    end)
+
+    @s3_client.stub_responses(:get_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+
+      if object = @objects.find { |obj| obj[:key] == context.params[:key] }
+        { content_length: object[:size], body: "A" * object[:size] }
+      else
+        { status_code: 404, headers: {}, body: "", }
+      end
+    end)
+
+    @s3_client.stub_responses(:put_object, -> (context) do
+      expect(context.params[:bucket]).to eq(SiteSetting.s3_backup_bucket)
+
+      @objects << {
+        key: context.params[:key],
+        size: context.params[:body].size,
+        last_modified: Time.zone.now
+      }
+    end)
+  end
+
+  before do
+    SiteSetting.s3_backup_bucket = "s3-backup-bucket"
+    SiteSetting.s3_access_key_id = "s3-access-key-id"
+    SiteSetting.s3_secret_access_key = "s3-secret-access-key"
+    SiteSetting.backup_location = BackupLocationSiteSetting::S3
+  end
+
+  subject(:store) { BackupRestore::BackupStore.create(s3_options: @s3_options) }
+  let(:expected_type) { BackupRestore::S3BackupStore }
+
+  it_behaves_like "backup store"
+  it_behaves_like "remote backup store"
+
+  context "S3 specific behavior" do
+    before { create_backups }
+    after(:all) { remove_backups }
+
+    it "doesn't delete files when cleanup is disabled" do
+      SiteSetting.maximum_backups = 1
+      SiteSetting.s3_disable_cleanup = true
+
+      expect { store.delete_old }.to_not change { store.files }
+    end
+  end
+
+  def create_backups
+    @objects.clear
+    @objects << { key: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
+    @objects << { key: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z") }
+    @objects << { key: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z") }
+    @objects << { key: "no-backup.txt", size: 12, last_modified: Time.parse("2018-09-05T14:27:00Z") }
+  end
+
+  def remove_backups
+    @objects.clear
+  end
+
+  def source_regex(filename)
+    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
+    filename = Regexp.escape(filename)
+    expires = BackupRestore::S3BackupStore::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS
+
+    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
+  end
+
+  def upload_url_regex(filename)
+    bucket = Regexp.escape(SiteSetting.s3_backup_bucket)
+    filename = Regexp.escape(filename)
+    expires = BackupRestore::S3BackupStore::UPLOAD_URL_EXPIRES_AFTER_SECONDS
+
+    /\Ahttps:\/\/#{bucket}.*\/#{filename}\?.*X-Amz-Expires=#{expires}.*X-Amz-Signature=.*\z/
+  end
+end
diff --git a/spec/lib/backup_restore/shared_examples_for_backup_store.rb b/spec/lib/backup_restore/shared_examples_for_backup_store.rb
new file mode 100644
index 00000000000..2ab716b6030
--- /dev/null
+++ b/spec/lib/backup_restore/shared_examples_for_backup_store.rb
@@ -0,0 +1,176 @@
+shared_context "backups" do
+  before { create_backups }
+  after(:all) { remove_backups }
+
+  let(:backup1) { BackupFile.new(filename: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z")) }
+  let(:backup2) { BackupFile.new(filename: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z")) }
+  let(:backup3) { BackupFile.new(filename: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z")) }
+end
+
+shared_examples "backup store" do
+  it "creates the correct backup store" do
+    expect(store).to be_a(expected_type)
+  end
+
+  context "without backup files" do
+    describe "#files" do
+      it "returns an empty array when there are no files" do
+        expect(store.files).to be_empty
+      end
+    end
+
+    describe "#latest_file" do
+      it "returns nil when there are no files" do
+        expect(store.latest_file).to be_nil
+      end
+    end
+  end
+
+  context "with backup files" do
+    include_context "backups"
+
+    describe "#files" do
+      it "sorts files by last modified date in descending order" do
+        expect(store.files).to eq([backup1, backup2, backup3])
+      end
+
+      it "returns only *.gz and *.tgz files" do
+        files = store.files
+        expect(files).to_not be_empty
+        expect(files.map(&:filename)).to contain_exactly(backup1.filename, backup2.filename, backup3.filename)
+      end
+    end
+
+    describe "#latest_file" do
+      it "returns the most recent backup file" do
+        expect(store.latest_file).to eq(backup1)
+      end
+
+      it "returns nil when there are no files" do
+        store.files.each { |file| store.delete_file(file.filename) }
+        expect(store.latest_file).to be_nil
+      end
+    end
+
+    describe "#delete_old" do
+      it "does nothing if the number of files is <= maximum_backups" do
+        SiteSetting.maximum_backups = 3
+
+        store.delete_old
+        expect(store.files).to eq([backup1, backup2, backup3])
+      end
+
+      it "deletes files starting with the oldest" do
+        SiteSetting.maximum_backups = 1
+
+        store.delete_old
+        expect(store.files).to eq([backup1])
+      end
+    end
+
+    describe "#file" do
+      it "returns information about the file when the file exists" do
+        expect(store.file(backup1.filename)).to eq(backup1)
+      end
+
+      it "returns nil when the file doesn't exist" do
+        expect(store.file("foo.gz")).to be_nil
+      end
+
+      it "includes the file's source location if it is requested" do
+        file = store.file(backup1.filename, include_download_source: true)
+        expect(file.source).to match(source_regex(backup1.filename))
+      end
+    end
+
+    describe "#delete_file" do
+      it "deletes file when the file exists" do
+        expect(store.files).to include(backup1)
+        store.delete_file(backup1.filename)
+        expect(store.files).to_not include(backup1)
+
+        expect(store.file(backup1.filename)).to be_nil
+      end
+
+      it "does nothing when the file doesn't exist" do
+        expect { store.delete_file("foo.gz") }.to_not change { store.files }
+      end
+    end
+
+    describe "#download_file" do
+      it "downloads file to the destination" do
+        filename = backup1.filename
+
+        Dir.mktmpdir do |path|
+          destination_path = File.join(path, File.basename(filename))
+          store.download_file(filename, destination_path)
+
+          expect(File.exists?(destination_path)).to eq(true)
+          expect(File.size(destination_path)).to eq(backup1.size)
+        end
+      end
+
+      it "raises an exception when the download fails" do
+        filename = backup1.filename
+        destination_path = Dir.mktmpdir { |path| File.join(path, File.basename(filename)) }
+
+        expect { store.download_file(filename, destination_path) }.to raise_exception(StandardError)
+      end
+    end
+  end
+end
+
+shared_examples "remote backup store" do
+  it "is a remote store" do
+    expect(store.remote?).to eq(true)
+  end
+
+  context "with backups" do
+    include_context "backups"
+
+    describe "#upload_file" do
+      it "uploads file into store" do
+        freeze_time
+
+        backup = BackupFile.new(
+          filename: "foo.tar.gz",
+          size: 33,
+          last_modified: Time.zone.now
+        )
+
+        expect(store.files).to_not include(backup)
+
+        Tempfile.create(backup.filename) do |file|
+          file.write("A" * backup.size)
+          file.close
+
+          store.upload_file(backup.filename, file.path, "application/gzip")
+        end
+
+        expect(store.files).to include(backup)
+        expect(store.file(backup.filename)).to eq(backup)
+      end
+
+      it "raises an exception when a file with the same filename exists" do
+        Tempfile.create(backup1.filename) do |file|
+          expect { store.upload_file(backup1.filename, file.path, "application/gzip") }
+            .to raise_exception(BackupRestore::BackupStore::BackupFileExists)
+        end
+      end
+    end
+
+    describe "#generate_upload_url" do
+      it "generates upload URL" do
+        filename = "foo.tar.gz"
+        url = store.generate_upload_url(filename)
+
+        expect(url).to match(upload_url_regex(filename))
+      end
+
+      it "raises an exception when a file with the same filename exists" do
+        expect { store.generate_upload_url(backup1.filename) }
+          .to raise_exception(BackupRestore::BackupStore::BackupFileExists)
+      end
+    end
+  end
+end
diff --git a/spec/models/admin_dashboard_data_spec.rb b/spec/models/admin_dashboard_data_spec.rb
index 6d81f623dd5..690ed831e88 100644
--- a/spec/models/admin_dashboard_data_spec.rb
+++ b/spec/models/admin_dashboard_data_spec.rb
@@ -210,9 +210,9 @@ describe AdminDashboardData do
   end

   context 'when setting is enabled' do
-    let(:setting_enabled) { true }
     before do
-      SiteSetting.public_send("#{setting_key}=", setting_enabled)
+      all_setting_keys.each { |key| SiteSetting.public_send("#{key}=", 'foo') }
+      SiteSetting.public_send("#{setting[:key]}=", setting[:enabled_value])
       SiteSetting.public_send("#{bucket_key}=", bucket_value)
     end
@@ -229,7 +229,7 @@ describe AdminDashboardData do
     context 'when bucket is filled in' do
       let(:bucket_value) { 'a' }
       before do
-        SiteSetting.public_send("s3_use_iam_profile=", use_iam_profile)
+        SiteSetting.s3_use_iam_profile = use_iam_profile
       end

       context 'when using iam profile' do
@@ -260,7 +260,7 @@ describe AdminDashboardData do

   context 'when setting is not enabled' do
     before do
-      SiteSetting.public_send("#{setting_key}=", false)
+      SiteSetting.public_send("#{setting[:key]}=", setting[:disabled_value])
     end

     it "always returns nil" do
@@ -272,13 +272,25 @@ describe AdminDashboardData do
   end

   describe 'uploads' do
-    let(:setting_key) { :enable_s3_uploads }
+    let(:setting) do
+      {
+        key: :enable_s3_uploads,
+        enabled_value: true,
+        disabled_value: false
+      }
+    end
     let(:bucket_key) { :s3_upload_bucket }
     include_examples 'problem detection for s3-dependent setting'
   end

   describe 'backups' do
-    let(:setting_key) { :enable_s3_backups }
+    let(:setting) do
+      {
+        key: :backup_location,
+        enabled_value: BackupLocationSiteSetting::S3,
+        disabled_value: BackupLocationSiteSetting::LOCAL
+      }
+    end
     let(:bucket_key) { :s3_backup_bucket }
     include_examples 'problem detection for s3-dependent setting'
   end
diff --git a/spec/models/backup_spec.rb b/spec/models/backup_spec.rb
deleted file mode 100644
index 1247ff3d577..00000000000
--- a/spec/models/backup_spec.rb
+++ /dev/null
@@ -1,130 +0,0 @@
-require 'rails_helper'
-require "s3_helper"
-
-require_dependency 'backup'
-
-describe Backup do
-
-  let(:b1) { Backup.new('backup1') }
-  let(:b2) { Backup.new('backup2') }
-  let(:b3) { Backup.new('backup3') }
-
-  before do
-    Backup.stubs(:all).returns([b1, b2, b3])
-  end
-
-  context '#remove_old' do
-    it "does nothing if there aren't more backups than the setting" do
-      SiteSetting.maximum_backups = 3
-      Backup.any_instance.expects(:remove).never
-      Backup.remove_old
-    end
-
-    it "calls remove on the backups over our limit" do
-      SiteSetting.maximum_backups = 1
-      b1.expects(:remove).never
-      b2.expects(:remove).once
-      b3.expects(:remove).once
-      Backup.remove_old
-    end
-  end
-
-  shared_context 's3 helpers' do
-    let(:client) { Aws::S3::Client.new(stub_responses: true) }
-    let(:resource) { Aws::S3::Resource.new(client: client) }
-    let!(:s3_bucket) { resource.bucket("s3-upload-bucket") }
-    let(:s3_helper) { b1.s3 }
-
-    before(:each) do
-      SiteSetting.s3_backup_bucket = "s3-upload-bucket"
-      SiteSetting.s3_access_key_id = "s3-access-key-id"
-      SiteSetting.s3_secret_access_key = "s3-secret-access-key"
-    end
-  end
-
-  context ".after_create_hook" do
-    context "when SiteSetting is true" do
-      include_context "s3 helpers"
-
-      before do
-        SiteSetting.enable_s3_backups = true
-      end
-
-      it "should upload the backup to S3 with the right paths" do
-        b1.path = 'some/path/backup.gz'
-        File.expects(:open).with(b1.path).yields(stub)
-
-        s3_helper.expects(:s3_bucket).returns(s3_bucket)
-        s3_object = stub
-
-        s3_bucket.expects(:object).with(b1.filename).returns(s3_object)
-        s3_object.expects(:upload_file)
-
-        b1.after_create_hook
-      end
-
-      context "when s3_backup_bucket includes folders path" do
-        before do
-          SiteSetting.s3_backup_bucket = "s3-upload-bucket/discourse-backups"
-        end
-
-        it "should upload the backup to S3 with the right paths" do
-          b1.path = 'some/path/backup.gz'
-          File.expects(:open).with(b1.path).yields(stub)
-
-          s3_helper.expects(:s3_bucket).returns(s3_bucket)
-          s3_object = stub
-
-          s3_bucket.expects(:object).with("discourse-backups/#{b1.filename}").returns(s3_object)
-          s3_object.expects(:upload_file)
-
-          b1.after_create_hook
-        end
-      end
-    end
-
-    it "calls upload_to_s3 if the SiteSetting is false" do
-      SiteSetting.enable_s3_backups = false
-      b1.expects(:upload_to_s3).never
-      b1.after_create_hook
-    end
-  end
-
-  context ".after_remove_hook" do
-    include_context "s3 helpers"
-
-    context "when SiteSetting is true" do
-      before do
-        SiteSetting.enable_s3_backups = true
-      end
-
-      context "when s3_backup_bucket includes folders path" do
-        before do
-          SiteSetting.s3_backup_bucket = "s3-upload-bucket/discourse-backups"
-        end
-
-        it "should upload the backup to S3 with the right paths" do
-          s3_helper.expects(:s3_bucket).returns(s3_bucket)
-          s3_object = stub
-
-          s3_bucket.expects(:object).with("discourse-backups/#{b1.filename}").returns(s3_object)
-          s3_object.expects(:delete)
-
-          b1.after_remove_hook
-        end
-      end
-    end
-
-    context "when SiteSetting is false" do
-      before do
-        SiteSetting.enable_s3_backups = false
-      end
-
-      it "doesn't call remove_from_s3" do
-        b1.expects(:remove_from_s3).never
-        b1.after_remove_hook
-      end
-    end
-  end
-
-end
diff --git a/spec/requests/admin/backups_controller_spec.rb b/spec/requests/admin/backups_controller_spec.rb
index 4224bcb07b1..f7344138746 100644
--- a/spec/requests/admin/backups_controller_spec.rb
+++ b/spec/requests/admin/backups_controller_spec.rb
@@ -4,6 +4,25 @@ RSpec.describe Admin::BackupsController do
   let(:admin) { Fabricate(:admin) }
   let(:backup_filename) { "2014-02-10-065935.tar.gz" }
   let(:backup_filename2) { "2014-02-11-065935.tar.gz" }
+  let(:store) { BackupRestore::LocalBackupStore.new }
+
+  def create_backup_files(*filenames)
+    @paths = filenames.map do |filename|
+      path = backup_path(filename)
+      File.open(path, "w") { |f| f.write("test backup") }
+      path
+    end
+  end
+
+  def backup_path(filename)
+    File.join(BackupRestore::LocalBackupStore.base_directory, filename)
+  end
+
+  def map_preloaded
+    controller.instance_variable_get("@preloaded").map do |key, value|
+      [key, JSON.parse(value)]
+    end.to_h
+  end

   it "is a subclass of AdminController" do
     expect(Admin::BackupsController < Admin::AdminController).to eq(true)
@@ -11,10 +30,14 @@
   before do
     sign_in(admin)
+    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
   end

   after do
     $redis.flushall
+
+    @paths&.each { |path| File.delete(path) if File.exists?(path) }
+    @paths = nil
   end

   describe "#index" do
@@ -29,11 +52,7 @@ RSpec.describe Admin::BackupsController do
       get "/admin/backups.html"
       expect(response.status).to eq(200)

-      preloaded = controller.instance_variable_get("@preloaded").map do |key, value|
-        [key, JSON.parse(value)]
-      end.to_h
-
-      expect(preloaded["backups"].size).to eq(Backup.all.size)
+      preloaded = map_preloaded
       expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
       expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
     end
@@ -42,23 +61,14 @@ RSpec.describe Admin::BackupsController do
    context "json format" do
      it "returns a list of all the backups" do
        begin
-          paths = []
-          [backup_filename, backup_filename2].each do |name|
-            path = File.join(Backup.base_directory, name)
-            paths << path
-            File.open(path, "w") { |f| f.write("hello") }
-            Backup.create_from_filename(name)
-          end
+          create_backup_files(backup_filename, backup_filename2)

           get "/admin/backups.json"
-
           expect(response.status).to eq(200)

-          json = JSON.parse(response.body).map { |backup| backup["filename"] }
-          expect(json).to include(backup_filename)
-          expect(json).to include(backup_filename2)
-        ensure
-          paths.each { |path| File.delete(path) }
+          filenames = JSON.parse(response.body).map { |backup| backup["filename"] }
+          expect(filenames).to include(backup_filename)
+          expect(filenames).to include(backup_filename2)
        end
      end
    end
@@ -88,36 +98,23 @@ RSpec.describe Admin::BackupsController do
    it "uses send_file to transmit the backup" do
      begin
        token = EmailBackupToken.set(admin.id)
-        path = File.join(Backup.base_directory, backup_filename)
-        File.open(path, "w") { |f| f.write("hello") }
-
-        Backup.create_from_filename(backup_filename)
+        create_backup_files(backup_filename)

        expect do
          get "/admin/backups/#{backup_filename}.json", params: { token: token }
        end.to change { UserHistory.where(action: UserHistory.actions[:backup_download]).count }.by(1)

-        expect(response.headers['Content-Length']).to eq("5")
+        expect(response.headers['Content-Length']).to eq("11")
        expect(response.headers['Content-Disposition']).to match(/attachment; filename/)
-      ensure
-        File.delete(path)
-        EmailBackupToken.del(admin.id)
      end
    end

    it "returns 422 when token is bad" do
      begin
-        path = File.join(Backup.base_directory, backup_filename)
-        File.open(path, "w") { |f| f.write("hello") }
-
-        Backup.create_from_filename(backup_filename)
-
        get "/admin/backups/#{backup_filename}.json", params: { token: "bad_value" }

        expect(response.status).to eq(422)
        expect(response.headers['Content-Disposition']).not_to match(/attachment; filename/)
-      ensure
-        File.delete(path)
      end
    end
@@ -125,20 +122,16 @@
      token = EmailBackupToken.set(admin.id)
      get "/admin/backups/#{backup_filename}.json", params: { token: token }

-      EmailBackupToken.del(admin.id)
      expect(response.status).to eq(404)
    end
  end

  describe '#destroy' do
-    let(:b) { Backup.new(backup_filename) }
-
    it "removes the backup if found" do
      begin
-        path = File.join(Backup.base_directory, backup_filename)
-        File.open(path, "w") { |f| f.write("hello") }
-
-        Backup.create_from_filename(backup_filename)
+        path = backup_path(backup_filename)
+        create_backup_files(backup_filename)
+
+        expect(File.exists?(path)).to eq(true)

        expect do
          delete "/admin/backups/#{backup_filename}.json"
        end.to change { UserHistory.where(action: UserHistory.actions[:backup_destroy]).count }.by(1)

        expect(response.status).to eq(200)
        expect(File.exists?(path)).to eq(false)
-      ensure
-        File.delete(path) if File.exists?(path)
      end
    end
@@ -162,9 +153,7 @@ RSpec.describe Admin::BackupsController do
      get "/admin/backups/logs.html"
      expect(response.status).to eq(200)

-      preloaded = controller.instance_variable_get("@preloaded").map do |key, value|
-        [key, JSON.parse(value)]
-      end.to_h
+      preloaded = map_preloaded

      expect(preloaded["operations_status"].symbolize_keys).to eq(BackupRestore.operations_status)
      expect(preloaded["logs"].size).to eq(BackupRestore.logs.size)
@@ -228,6 +217,7 @@ RSpec.describe Admin::BackupsController do
      described_class.any_instance.expects(:has_enough_space_on_disk?).returns(true)

      filename = 'test_Site-0123456789.tar.gz'
+      @paths = [backup_path(File.join('tmp', 'test', "#{filename}.part1"))]

      post "/admin/backups/upload.json", params: {
        resumableFilename: filename,
@@ -241,13 +231,6 @@ RSpec.describe Admin::BackupsController do

        expect(response.status).to eq(200)
        expect(response.body).to eq("")
-      ensure
-        begin
-          File.delete(
-            File.join(Backup.base_directory, 'tmp', 'test', "#{filename}.part1")
-          )
-        rescue Errno::ENOENT
-        end
      end
    end
  end
@@ -284,18 +267,16 @@ RSpec.describe Admin::BackupsController do
  describe "#email" do
-    let(:backup_filename) { "test.tar.gz" }
-    let(:backup) { Backup.new(backup_filename) }
-
    it "enqueues email job" do
-      Backup.expects(:[]).with(backup_filename).returns(backup)
+      create_backup_files(backup_filename)

-      Jobs.expects(:enqueue).with(:download_backup_email,
-        user_id: admin.id,
-        backup_file_path: 'http://www.example.com/admin/backups/test.tar.gz'
-      )
+      expect {
+        put "/admin/backups/#{backup_filename}.json"
+      }.to change { Jobs::DownloadBackupEmail.jobs.size }.by(1)

-      put "/admin/backups/#{backup_filename}.json"
+      job_args = Jobs::DownloadBackupEmail.jobs.last["args"].first
+      expect(job_args["user_id"]).to eq(admin.id)
+      expect(job_args["backup_file_path"]).to eq("http://www.example.com/admin/backups/#{backup_filename}")

      expect(response.status).to eq(200)
    end
diff --git a/test/javascripts/lib/utilities-test.js.es6 b/test/javascripts/lib/utilities-test.js.es6
index ba2448b35c0..56804e0aca9 100644
--- a/test/javascripts/lib/utilities-test.js.es6
+++ b/test/javascripts/lib/utilities-test.js.es6
@@ -109,6 +109,14 @@ QUnit.test("ensures an authorized upload", assert => {
   );
 });

+QUnit.test("skipping validation works", assert => {
+  const files = [{ name: "backup.tar.gz" }];
+  sandbox.stub(bootbox, "alert");
+
+  assert.not(validUpload(files, { skipValidation: false }));
+  assert.ok(validUpload(files, { skipValidation: true }));
+});
+
 QUnit.test("staff can upload anything in PM", assert => {
   const files = [{ name: "some.docx" }];
   Discourse.SiteSettings.authorized_extensions = "jpeg";