2014-02-21 19:41:01 -05:00
|
|
|
module Jobs
|
|
|
|
|
|
|
|
class BackupChunksMerger < Jobs::Base
  # Runs on the critical queue so backup finalization is not starved by
  # regular jobs; no retry because a failed merge should be re-initiated
  # by a fresh upload rather than replayed blindly.
  sidekiq_options queue: 'critical', retry: false

  # Merges the chunks of a chunked backup upload into the final backup
  # file, then pushes the refreshed backup list to staff clients.
  #
  # @param args [Hash]
  #   :filename   [String]  name of the final backup file
  #   :identifier [String]  unique identifier of the chunked upload
  #   :chunks     [Integer] number of chunks to merge (must be > 0)
  # @raise [Discourse::InvalidParameters] if any argument is blank/invalid
  def execute(args)
    filename = args[:filename]
    identifier = args[:identifier]
    chunks = args[:chunks].to_i

    raise Discourse::InvalidParameters.new(:filename) if filename.blank?
    raise Discourse::InvalidParameters.new(:identifier) if identifier.blank?
    raise Discourse::InvalidParameters.new(:chunks) if chunks <= 0

    # FIX: the interpolation here was corrupted ("#(unknown)"); the merge
    # target is the final backup file named after the uploaded filename,
    # living directly under the backups base directory.
    backup_path = "#{Backup.base_directory}/#{filename}"
    tmp_backup_path = "#{backup_path}.tmp"
    # path to the tmp directory that holds the individual chunks
    # (derived from chunk 0; all chunks share the same directory)
    tmp_directory = File.dirname(Backup.chunk_path(identifier, filename, 0))

    # merge all chunks into the final file (via the .tmp intermediate)
    HandleChunkUpload.merge_chunks(
      chunks,
      upload_path: backup_path,
      tmp_upload_path: tmp_backup_path,
      model: Backup,
      identifier: identifier,
      filename: filename,
      tmp_directory: tmp_directory
    )

    # push an updated backup list to staff clients only
    data = ActiveModel::ArraySerializer.new(Backup.all, each_serializer: BackupSerializer).as_json
    MessageBus.publish("/admin/backups", data, user_ids: User.staff.pluck(:id))
  end
end
|
|
|
|
|
|
|
|
end
|