2019-05-02 02:57:12 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2019-10-02 00:01:53 -04:00
|
|
|
require "backup_restore"
|
2018-10-14 21:43:31 -04:00
|
|
|
require "backup_restore/backup_store"
|
2014-02-12 23:33:21 -05:00
|
|
|
|
|
|
|
# Admin-only endpoints for managing site backups: listing, creating,
# restoring, downloading, emailing, deleting, and (chunked or direct)
# uploading of backup archives.
#
# ExternalUploadHelpers supplies the multipart-upload endpoints; this
# controller provides the hooks it calls (validate_before_create_multipart,
# create_direct_multipart_upload, serialize_upload below).
class Admin::BackupsController < Admin::AdminController
  include ExternalUploadHelpers

  # Every action 404s unless backups are enabled in site settings.
  before_action :ensure_backups_enabled
  # These actions serve full-page HTML or are hit by the resumable uploader,
  # so they are exempt from the XHR-only check.
  skip_before_action :check_xhr, only: [:index, :show, :logs, :check_backup_chunk, :upload_backup_chunk]
|
2014-02-12 23:33:21 -05:00
|
|
|
|
|
|
|
# Backups overview. The HTML variant preloads operation status and logs for
# the admin UI shell; the JSON variant returns the list of backup files from
# the configured store.
def index
  respond_to do |format|
    format.html do
      # Preload status + logs so the admin UI renders without extra requests.
      store_preloaded("operations_status", MultiJson.dump(BackupRestore.operations_status))
      store_preloaded("logs", MultiJson.dump(BackupRestore.logs))
      render "default/empty"
    end

    format.json do
      backup_store = BackupRestore::BackupStore.create
      render_serialized(backup_store.files, BackupFileSerializer)
    rescue BackupRestore::BackupStore::StorageError => e
      render_json_error(e)
    end
  end
end
|
|
|
|
|
|
|
|
# Returns the current backup/restore operation status as JSON
# (same payload the HTML views preload).
def status
  render_json_dump(BackupRestore.operations_status)
end
|
|
|
|
|
|
|
|
# Starts a new backup in the background. Progress is published over the
# message bus to the requesting client. Responds with a failure payload if
# a backup or restore is already running.
def create
  BackupRestore.backup!(
    current_user.id,
    publish_to_message_bus: true,
    with_uploads: params.fetch(:with_uploads) == "true",
    client_id: params[:client_id],
  )
rescue BackupRestore::OperationRunningError
  render_error("backup.operation_already_running")
else
  # Only log the staff action when the backup actually started.
  StaffActionLogger.new(current_user).log_backup_create
  render json: success_json
end
|
|
|
|
|
|
|
|
# Cancels the currently running backup/restore operation.
def cancel
  begin
    BackupRestore.cancel!
    render json: success_json
  rescue BackupRestore::OperationRunningError
    # Raised when the operation cannot be cancelled right now.
    render_error("backup.operation_already_running")
  end
end
|
|
|
|
|
2017-03-01 10:26:18 -05:00
|
|
|
# Enqueues a job that emails the current admin a tokenized link to download
# the requested backup (the link points at #show). 404s when the backup
# file does not exist in the store.
def email
  store = BackupRestore::BackupStore.create
  backup = store.file(params.fetch(:id))
  return render body: nil, status: 404 if backup.blank?

  Jobs.enqueue(
    :download_backup_email,
    user_id: current_user.id,
    backup_file_path: url_for(controller: 'backups', action: 'show')
  )

  render body: nil
end
|
|
|
|
|
|
|
|
# Downloads a backup file. Requires a single-use email token (delivered by
# the #email action); the token is consumed on success. Remote stores
# redirect to the download source, local stores stream the file.
def show
  if !EmailBackupToken.compare(current_user.id, params.fetch(:token))
    @error = I18n.t('download_backup_mailer.no_token')
    return render layout: 'no_ember', status: 422, formats: [:html]
  end
  store = BackupRestore::BackupStore.create
  # Intentional assignment-in-condition: backup is nil when the id is unknown.
  if backup = store.file(params.fetch(:id), include_download_source: true)
    # Invalidate the token before handing out the file so it cannot be reused.
    EmailBackupToken.del(current_user.id)
    StaffActionLogger.new(current_user).log_backup_download(backup)
    if store.remote?
      # Remote store (e.g. object storage): source is a download URL.
      redirect_to backup.source
    else
      # Local store: source is a filesystem path; set Content-Length so the
      # client sees the full size up front.
      headers['Content-Length'] = File.size(backup.source).to_s
      send_file backup.source
    end
  else
    render body: nil, status: 404
  end
end
|
|
|
|
|
|
|
|
# Deletes a backup file from the store; 404s when the id is unknown.
# The staff action is logged before the file is removed.
def destroy
  store = BackupRestore::BackupStore.create
  backup = store.file(params.fetch(:id))
  return render body: nil, status: 404 if backup.nil?

  StaffActionLogger.new(current_user).log_backup_destroy(backup)
  store.delete_file(backup.filename)
  render body: nil
end
|
|
|
|
|
|
|
|
# Logs page for a backup/restore operation. Preloads the same status/log
# payloads as #index's HTML branch, then renders the empty Ember shell.
def logs
  preloaded = {
    "operations_status" => BackupRestore.operations_status,
    "logs" => BackupRestore.logs,
  }
  preloaded.each { |key, value| store_preloaded(key, MultiJson.dump(value)) }
  render "default/empty"
end
|
|
|
|
|
|
|
|
# Starts restoring the given backup file in the background, streaming
# progress to the requesting client over the message bus. Fails with a JSON
# error payload when another operation is already running.
def restore
  BackupRestore.restore!(
    current_user.id,
    filename: params.fetch(:id),
    client_id: params.fetch(:client_id),
    publish_to_message_bus: true,
  )
rescue BackupRestore::OperationRunningError
  render_error("backup.operation_already_running")
else
  render json: success_json
end
|
|
|
|
|
|
|
|
# Rolls the site back to the state before the last restore.
def rollback
  begin
    BackupRestore.rollback!
    render json: success_json
  rescue BackupRestore::OperationRunningError
    # Cannot roll back while a backup/restore is in progress.
    render_error("backup.operation_already_running")
  end
end
|
|
|
|
|
|
|
|
# Toggles user-facing readonly mode (used around backup/restore windows).
# params[:enable] == "true" enables it; anything else disables it.
def readonly
  enable = params.fetch(:enable).to_s == "true"
  key = Discourse::USER_READONLY_MODE_KEY

  enable ? Discourse.enable_readonly_mode(key) : Discourse.disable_readonly_mode(key)

  StaffActionLogger.new(current_user).log_change_readonly_mode(enable)

  render body: nil
end
|
|
|
|
|
2014-05-27 16:14:37 -04:00
|
|
|
# Probe endpoint for the resumable.js-style chunked uploader: reports
# (via HTTP status) whether a given chunk has already been received, so the
# client can skip re-uploading it.
def check_backup_chunk
  identifier = params.fetch(:resumableIdentifier)
  filename = params.fetch(:resumableFilename)
  chunk_number = params.fetch(:resumableChunkNumber)
  current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i

  # The identifier is interpolated into an on-disk path below, so it must be
  # a plain filename-safe token.
  raise Discourse::InvalidParameters.new(:resumableIdentifier) unless valid_filename?(identifier)

  # path to chunk file
  chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
  # check chunk upload status
  status = HandleChunkUpload.check_chunk(chunk, current_chunk_size: current_chunk_size)

  render body: nil, status: status
end
|
|
|
|
|
2014-05-27 16:14:37 -04:00
|
|
|
# Receives one chunk of a chunked backup upload and stores it locally.
# Once the accumulated size covers the declared total, schedules a
# background job to merge the chunks into the final backup file.
def upload_backup_chunk
  filename = params.fetch(:resumableFilename)
  total_size = params.fetch(:resumableTotalSize).to_i
  identifier = params.fetch(:resumableIdentifier)

  # identifier becomes part of an on-disk path — must be filename-safe.
  raise Discourse::InvalidParameters.new(:resumableIdentifier) unless valid_filename?(identifier)
  return render status: 415, plain: I18n.t("backup.backup_file_should_be_tar_gz") unless valid_extension?(filename)
  return render status: 415, plain: I18n.t("backup.not_enough_space_on_disk") unless has_enough_space_on_disk?(total_size)
  return render status: 415, plain: I18n.t("backup.invalid_filename") unless valid_filename?(filename)

  file = params.fetch(:file)
  chunk_number = params.fetch(:resumableChunkNumber).to_i
  chunk_size = params.fetch(:resumableChunkSize).to_i
  current_chunk_size = params.fetch(:resumableCurrentChunkSize).to_i
  previous_chunk_number = chunk_number - 1

  chunk = BackupRestore::LocalBackupStore.chunk_path(identifier, filename, chunk_number)
  HandleChunkUpload.upload_chunk(chunk, file: file)

  # when all chunks are uploaded
  # (every chunk before the current one has the nominal chunk_size; only the
  # last chunk may be smaller, which is why current_chunk_size is added)
  uploaded_file_size = previous_chunk_number * chunk_size
  if uploaded_file_size + current_chunk_size >= total_size
    # merge all the chunks in a background thread
    Jobs.enqueue_in(5.seconds, :backup_chunks_merger, filename: filename, identifier: identifier, chunks: chunk_number)
  end

  render body: nil
end
|
|
|
|
|
2018-10-14 21:43:31 -04:00
|
|
|
# Generates an upload URL from the backup store for a direct (non-chunked)
# backup upload. Responds with a JSON error when the filename is invalid,
# the file already exists, or the store reports a storage error.
#
# Note: params.require already returns the value (raising
# ActionController::ParameterMissing when absent), so the previous
# require-then-fetch double lookup is collapsed into one call.
def create_upload_url
  filename = params.require(:filename)

  return render_json_error(I18n.t("backup.backup_file_should_be_tar_gz")) unless valid_extension?(filename)
  return render_json_error(I18n.t("backup.invalid_filename")) unless valid_filename?(filename)

  store = BackupRestore::BackupStore.create

  begin
    upload_url = store.generate_upload_url(filename)
  rescue BackupRestore::BackupStore::BackupFileExists
    return render_json_error(I18n.t("backup.file_exists"))
  rescue BackupRestore::BackupStore::StorageError => e
    return render_json_error(e)
  end

  render json: success_json.merge(url: upload_url)
end
|
|
|
|
|
2014-02-26 13:38:06 -05:00
|
|
|
private
|
|
|
|
|
|
|
|
# True when the local backups directory's volume has more than +size+ bytes
# free.
def has_enough_space_on_disk?(size)
  backups_dir = "#{Rails.root}/public/backups"
  DiskSpace.free(backups_dir) > size
end
|
|
|
|
|
2017-12-21 15:21:28 -05:00
|
|
|
# before_action guard: refuses access to every action in this controller
# unless backups are enabled in site settings.
def ensure_backups_enabled
  raise Discourse::InvalidAccess.new unless SiteSetting.enable_backups?
end
|
|
|
|
|
2018-10-14 21:43:31 -04:00
|
|
|
# True when +filename+ ends in .tar.gz, .tgz or .gz (case-insensitive).
#
# Anchored with \z rather than $: the $ anchor matches at the end of any
# line, so "backup.tar.gz\n<anything>" would previously pass the check.
# Regexp#match? also avoids allocating a MatchData (callers only use the
# result's truthiness).
def valid_extension?(filename)
  /\.(tar\.gz|t?gz)\z/i.match?(filename)
end
|
|
|
|
|
|
|
|
# True when +filename+ consists solely of alphanumerics, dots, underscores
# and hyphens — in particular no path separators, since this value is
# interpolated into filesystem paths (see chunk_path usage above).
#
# Anchored with \A..\z rather than ^..$: the line anchors match per line,
# so "good.tar.gz\n../../etc/passwd" would previously be accepted.
def valid_filename?(filename)
  /\A[a-zA-Z0-9._-]+\z/.match?(filename)
end
|
|
|
|
|
|
|
|
# Renders the standard failure JSON payload with a translated message.
# Used by create/cancel/restore/rollback when an operation is already running.
def render_error(message_key)
  render json: failed_json.merge(message: I18n.t(message_key))
end
|
2021-11-17 18:17:23 -05:00
|
|
|
|
|
|
|
# ExternalUploadHelpers hook: rejects multipart uploads whose file name is
# not a valid backup archive name before any upload is created.
def validate_before_create_multipart(file_name:, file_size:, upload_type:)
  unless valid_extension?(file_name)
    raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.backup_file_should_be_tar_gz"))
  end

  unless valid_filename?(file_name)
    raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.invalid_filename"))
  end
end
|
|
|
|
|
|
|
|
# ExternalUploadHelpers hook: the backup flow never creates an Upload
# record, so there is nothing to serialize back to the client.
def self.serialize_upload(_upload)
  {} # noop, the backup does not create an upload record
end
|
|
|
|
|
|
|
|
# ExternalUploadHelpers hook: runs the store's multipart-upload creation
# (yielded by the caller) and translates store errors into the validation
# error type the helper expects. Returns whatever the block returns.
def create_direct_multipart_upload
  yield
rescue BackupRestore::BackupStore::StorageError => err
  message = debug_upload_error(err, I18n.t("upload.create_multipart_failure", additional_detail: err.message))
  raise ExternalUploadHelpers::ExternalUploadValidationError.new(message)
rescue BackupRestore::BackupStore::BackupFileExists
  raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.file_exists"))
end
|
2014-02-12 23:33:21 -05:00
|
|
|
end
|