2019-05-02 18:17:27 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2018-10-14 21:43:31 -04:00
|
|
|
module BackupRestore
  # Backup storage backend that keeps backup archives in an S3 bucket.
  #
  # Files live under a per-database prefix inside the bucket configured by
  # `SiteSetting.s3_backup_bucket`, so multisite installs don't mix backups.
  class S3BackupStore < BackupStore
    # Presigned PUT (upload) URLs stay valid for 6 hours.
    UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600

    # @param opts [Hash] optional overrides; `opts[:s3_options]` is merged
    #   into the client options derived from site settings.
    def initialize(opts = {})
      @s3_options = S3Helper.s3_options(SiteSetting)
      @s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
      @s3_helper = S3Helper.new(s3_bucket_name_with_prefix, '', @s3_options.clone)
    end

    # Files are stored remotely (on S3), not on the local filesystem.
    def remote?
      true
    end

    # Looks up a single backup by name.
    #
    # @return [BackupFile, nil] nil when no such object exists
    def file(filename, include_download_source: false)
      obj = @s3_helper.object(filename)
      create_file_from_object(obj, include_download_source: include_download_source) if obj.exists?
    end

    # Deletes the named backup if present and invalidates the cached listing.
    def delete_file(filename)
      obj = @s3_helper.object(filename)

      if obj.exists?
        obj.delete
        reset_cache
      end
    end

    # Downloads a backup to `destination_path`; `failure_message` is passed
    # through to the helper for error reporting.
    def download_file(filename, destination_path, failure_message = nil)
      @s3_helper.download_file(filename, destination_path, failure_message)
    end

    # Uploads a local file as a new backup and invalidates the cached listing.
    #
    # @raise [BackupFileExists] when a backup with that name already exists
    def upload_file(filename, source_path, content_type)
      obj = @s3_helper.object(filename)
      raise BackupFileExists if obj.exists?

      obj.upload_file(source_path, content_type: content_type)
      reset_cache
    end

    # Generates a presigned PUT URL so clients can upload a backup directly
    # to S3 without streaming the bytes through the application.
    #
    # @raise [BackupFileExists] when a backup with that name already exists
    # @raise [StorageError] when S3 rejects the request
    def generate_upload_url(filename)
      obj = @s3_helper.object(filename)
      raise BackupFileExists if obj.exists?

      # TODO (martin) We can remove this at a later date when we move this
      # ensure CORS for backups and direct uploads to a post-site-setting
      # change event, so the rake task doesn't have to be run manually.
      @s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])

      presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS)
    rescue Aws::Errors::ServiceError => e
      message = cleaned_error_message(e)
      Rails.logger.warn("Failed to generate upload URL for S3: #{message}")
      raise StorageError, message
    end

    private

    # Lists all backup archives in the bucket (unsorted), skipping objects
    # whose keys don't look like backup archives.
    #
    # @return [Array<BackupFile>]
    # @raise [StorageError] when S3 rejects the request
    def unsorted_files
      objects = []

      @s3_helper.list.each do |obj|
        objects << create_file_from_object(obj) if obj.key.match?(file_regex)
      end

      objects
    rescue Aws::Errors::ServiceError => e
      message = cleaned_error_message(e)
      Rails.logger.warn("Failed to list backups from S3: #{message}")
      raise StorageError, message
    end

    # Wraps an S3 object in a BackupFile; optionally includes a presigned
    # GET URL so the file can be downloaded directly from S3.
    def create_file_from_object(obj, include_download_source: false)
      expires = S3Helper::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS
      BackupFile.new(
        filename: File.basename(obj.key),
        size: obj.size,
        last_modified: obj.last_modified,
        source: include_download_source ? presigned_url(obj, :get, expires) : nil
      )
    end

    # Presigns a URL for the given HTTP method (:get or :put).
    def presigned_url(obj, method, expires_in_seconds)
      obj.presigned_url(method, expires_in: expires_in_seconds)
    end

    # Prefers the exception message, falling back to the class name when the
    # message is blank — avoids logging/raising an empty string.
    def cleaned_error_message(e)
      e.message.presence || e.class.name
    end

    def cleanup_allowed?
      !SiteSetting.s3_disable_cleanup
    end

    # Bucket name joined with a per-database folder so multisite backups
    # don't collide in a shared bucket.
    def s3_bucket_name_with_prefix
      File.join(SiteSetting.s3_backup_bucket, RailsMultisite::ConnectionManagement.current_db)
    end

    # Matches .gz/.tgz archive keys directly under the backup prefix
    # (no deeper subfolders), case-insensitively.
    def file_regex
      @file_regex ||= begin
        path = @s3_helper.s3_bucket_folder_path || ""

        if path.present?
          path = "#{path}/" unless path.end_with?("/")
          path = Regexp.quote(path)
        end

        /^#{path}[^\/]*\.t?gz$/i
      end
    end

    # Free space isn't a meaningful concept for S3; nil signals "unknown".
    def free_bytes
      nil
    end
  end
end
|