FEATURE: Add hidden setting to include S3 uploads in backups

Gerhard Schlager 2019-07-01 20:38:36 +02:00
parent 9f5cfa192e
commit f2dc59d61f
3 changed files with 65 additions and 7 deletions


@@ -1632,6 +1632,10 @@ backups:
   include_thumbnails_in_backups:
     default: false
     shadowed_by_global: true
+  include_s3_uploads_in_backups:
+    default: false
+    shadowed_by_global: true
+    hidden: true
 
 search:
   min_search_term_length:
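
Because the new setting is flagged `hidden`, it does not appear in the admin UI. A minimal sketch of how an administrator could turn it on, assuming the usual Discourse conventions for hidden and `shadowed_by_global` site settings (the console command and environment variable name below are illustrative, not part of this commit):

    # From a Rails console on the server (e.g. `rails c` inside the app container):
    SiteSetting.include_s3_uploads_in_backups = true

    # Or, since the setting is shadowed_by_global, force it through a global
    # setting, which a standard install reads from an environment variable such
    # as DISCOURSE_INCLUDE_S3_UPLOADS_IN_BACKUPS=true; a global value takes
    # precedence over the value stored in the database.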


@@ -1,6 +1,7 @@
 # frozen_string_literal: true
 
 require "mini_mime"
+require "file_store/s3_store"
 
 module BackupRestore
@@ -234,9 +235,25 @@ module BackupRestore
       )
     end
 
     def add_uploads_to_archive(tar_filename)
+      if SiteSetting.Upload.enable_s3_uploads
+        add_remote_uploads_to_archive(tar_filename)
+      else
+        add_local_uploads_to_archive(tar_filename)
+      end
+
+      remove_tmp_directory
+
+      log "Gzipping archive, this may take a while..."
+      Discourse::Utils.execute_command(
+        'gzip', "-#{SiteSetting.backup_gzip_compression_level_for_uploads}", tar_filename,
+        failure_message: "Failed to gzip archive."
+      )
+    end
+
+    def add_local_uploads_to_archive(tar_filename)
+      log "Archiving uploads..."
+
       upload_directory = "uploads/" + @current_db
 
-      log "Archiving uploads..."
       FileUtils.cd(File.join(Rails.root, "public")) do
         if File.directory?(upload_directory)
           exclude_optimized = SiteSetting.include_thumbnails_in_backups ? '' : "--exclude=#{upload_directory}/optimized"
@@ -249,14 +266,47 @@ module BackupRestore
           log "No uploads found, skipping archiving uploads..."
         end
       end
+    end
 
-      remove_tmp_directory
+    def add_remote_uploads_to_archive(tar_filename)
+      if !SiteSetting.include_s3_uploads_in_backups
+        log "Skipping uploads stored on S3."
+        return
+      end
 
-      log "Gzipping archive, this may take a while..."
-      Discourse::Utils.execute_command(
-        'gzip', "-#{SiteSetting.backup_gzip_compression_level_for_uploads}", tar_filename,
-        failure_message: "Failed to gzip archive."
-      )
+      log "Downloading uploads from S3. This may take a while..."
+
+      store = FileStore::S3Store.new
+      upload_directory = File.join("uploads", @current_db)
+      count = 0
+
+      FileUtils.cd(@tmp_directory) do
+        Upload.find_each do |upload|
+          next if upload.local?
+          filename = File.join(@tmp_directory, upload_directory, store.get_path_for_upload(upload))
+
+          begin
+            FileUtils.mkdir_p(File.dirname(filename))
+            store.download_file(upload, filename)
+          rescue StandardError => ex
+            log "Failed to download file with upload ID #{upload.id} from S3", ex
+          end
+
+          if File.exists?(filename)
+            Discourse::Utils.execute_command(
+              'tar', '--append', '--file', tar_filename, upload_directory,
+              failure_message: "Failed to add #{upload.original_filename} to archive.", success_status_codes: [0, 1]
+            )
+
+            File.delete(filename)
+          end
+
+          count += 1
+          log "#{count} files have already been downloaded. Still downloading..." if count % 500 == 0
+        end
+      end
+
+      log "No uploads found, skipping archiving uploads..." if count == 0
     end
 
     def upload_archive
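
The new `add_remote_uploads_to_archive` streams uploads through the temporary directory: each file is downloaded from S3, appended to the still-uncompressed tar, and deleted right away, so the staging area never has to hold the whole upload set at once, and `success_status_codes: [0, 1]` tolerates tar's warning exit status. A standalone sketch of that append-then-delete pattern, assuming GNU tar on the PATH (the file names and contents are made up for illustration and are not part of this commit):

    require "fileutils"
    require "tmpdir"

    Dir.mktmpdir do |staging|
      tar_filename = File.join(staging, "backup.tar")
      upload_directory = "uploads/default"

      # Start with an empty archive (the real backup tar already holds the database dump).
      system("tar", "--create", "--file", tar_filename, "--files-from", "/dev/null", exception: true)

      %w[a.txt b.txt c.txt].each do |name|
        path = File.join(staging, upload_directory, name)
        FileUtils.mkdir_p(File.dirname(path))
        File.write(path, "placeholder for a file downloaded from S3")  # stands in for store.download_file

        Dir.chdir(staging) do
          # Append whatever currently sits under uploads/; only the file written
          # above is present, because earlier files were already deleted.
          system("tar", "--append", "--file", tar_filename, upload_directory, exception: true)
        end

        File.delete(path)  # keep the staging directory small
      end

      # Each appended file shows up once (the directory entry repeats).
      system("tar", "--list", "--file", tar_filename, exception: true)
    end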


@@ -167,6 +167,10 @@ module FileStore
       end
     end
 
+    def download_file(upload, destination_path)
+      @s3_helper.download_file(get_upload_key(upload), destination_path)
+    end
+
     private
 
     def get_upload_key(upload)
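
The new `download_file` method simply resolves the upload's S3 object key via the private `get_upload_key` helper and hands it to the S3 helper. A hypothetical one-off use from a Rails console, outside the backup path shown above (the destination directory and the smoke-test framing are made up, not part of this commit):

    # In a Rails console on a site with S3 uploads enabled:
    require "file_store/s3_store"

    FileUtils.mkdir_p("/tmp/s3-check")
    store = FileStore::S3Store.new

    Upload.find_each do |upload|
      next if upload.local?  # only files actually stored on S3
      store.download_file(upload, "/tmp/s3-check/#{upload.sha1}#{File.extname(upload.url)}")
      break                  # one file is enough for a smoke test
    end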