new S3 backup layout (#9830)
* DEV: new S3 backup layout. Currently, with an s3_backup_bucket of "bucket/backups", multisite backups end up in "bucket/backups/backups/dbname/" and single-site backups end up in "bucket/backups/". Both should be in "bucket/backups/dbname/". Changes: remove MULTISITE_PREFIX, always include dbname in the prefix, add a method to move backups to the new prefix, and add a job to call that method. * SPEC: add tests for the `VacateLegacyPrefixBackups` onceoff job. Co-authored-by: Vinoth Kannan <vinothkannan@vinkas.com>
This commit is contained in:
parent
1509afefba
commit
74d28a43d1
|
@ -0,0 +1,10 @@
|
|||
# frozen_string_literal: true

module Jobs
  # One-shot migration job: relocates S3 backups stored under the legacy
  # "backups/" prefix into the new per-database prefix layout.
  # Runs only when backups are configured to live on S3.
  class VacateLegacyPrefixBackups < ::Jobs::Onceoff
    def execute_onceoff(args)
      args ||= {}
      return unless SiteSetting.backup_location == BackupLocationSiteSetting::S3

      # args[:s3_options] lets tests inject a stubbed S3 client.
      store = BackupRestore::S3BackupStore.create(s3_options: args[:s3_options])
      store.vacate_legacy_prefix
    end
  end
end
|
|
@ -3,12 +3,11 @@
|
|||
module BackupRestore
|
||||
class S3BackupStore < BackupStore
|
||||
UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600 # 6 hours
|
||||
MULTISITE_PREFIX = "backups"
|
||||
|
||||
# Builds the store's S3 helper from site settings, optionally overridden
# by caller-supplied options (used by the onceoff migration job / specs).
#
# @param opts [Hash] may contain :s3_options, merged over the site defaults
def initialize(opts = {})
  @s3_options = S3Helper.s3_options(SiteSetting)
  @s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
  # Cloned so later mutation of @s3_options (e.g. by vacate_legacy_prefix's
  # second helper) cannot affect this helper's configuration.
  @s3_helper = S3Helper.new(s3_bucket_name_with_prefix, '', @s3_options.clone)
end
|
||||
|
||||
def remote?
|
||||
|
@ -52,6 +51,20 @@ module BackupRestore
|
|||
raise StorageError
|
||||
end
|
||||
|
||||
# Moves every backup stored under the legacy prefix layout
# ("bucket/backups/<db>/...") into the new per-database layout
# ("bucket/<db>/..."): each object is copied to its new key, then the
# legacy object is deleted. Safe to re-run; an empty legacy listing is a no-op.
def vacate_legacy_prefix
  legacy_s3_helper = S3Helper.new(s3_bucket_name_with_legacy_prefix, '', @s3_options.clone)
  legacy_keys = legacy_s3_helper.list.map(&:key)

  # Loop-invariant: the bucket/prefix split is the same for every object,
  # so compute it once instead of on each iteration.
  bucket, prefix = s3_bucket_name_with_prefix.split('/', 2)

  legacy_keys.each do |legacy_key|
    @s3_helper.s3_client.copy_object({
      copy_source: File.join(bucket, legacy_key),
      bucket: bucket,
      # Keep only the filename; the db name is already part of `prefix`.
      key: File.join(prefix, legacy_key.split('/').last)
    })
    legacy_s3_helper.delete_object(legacy_key)
  end
end
|
||||
|
||||
private
|
||||
|
||||
def unsorted_files
|
||||
|
@ -99,8 +112,12 @@ module BackupRestore
|
|||
end
|
||||
|
||||
# New layout: "<s3_backup_bucket>/<current_db>" for every site,
# multisite or not (the db name is always included).
def s3_bucket_name_with_prefix
  File.join(SiteSetting.s3_backup_bucket, RailsMultisite::ConnectionManagement.current_db)
end
|
||||
|
||||
# Legacy layout used before this change, kept only so vacate_legacy_prefix
# can find old objects: multisite stored under an extra "backups/" segment,
# single-site stored at the bucket root.
def s3_bucket_name_with_legacy_prefix
  if Rails.configuration.multisite
    File.join(SiteSetting.s3_backup_bucket, "backups", RailsMultisite::ConnectionManagement.current_db)
  else
    SiteSetting.s3_backup_bucket
  end
end
|
|
@ -63,7 +63,12 @@ class S3Helper
|
|||
|
||||
# delete the file
|
||||
s3_filename.prepend(multisite_upload_path) if Rails.configuration.multisite
|
||||
s3_bucket.object(get_path_for_s3_upload(s3_filename)).delete
|
||||
delete_object(get_path_for_s3_upload(s3_filename))
|
||||
rescue Aws::S3::Errors::NoSuchKey
|
||||
end
|
||||
|
||||
# Deletes a single object from the bucket by key.
# A missing key is swallowed so deletion is idempotent.
def delete_object(key)
  s3_bucket.object(key).delete
rescue Aws::S3::Errors::NoSuchKey
end
|
||||
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
require "s3_helper"
|
||||
require 'rails_helper'
|
||||
|
||||
describe Jobs::VacateLegacyPrefixBackups, type: :multisite do
  let(:bucket_name) { "backupbucket" }

  before(:all) do
    # Stubbed client: no real AWS calls are made anywhere in this spec.
    @s3_client = Aws::S3::Client.new(stub_responses: true)
    @s3_options = { client: @s3_client }
    @objects = []
    create_backups

    # ListObjectsV2 is answered from the in-memory fixture list,
    # filtered by the prefix the store asks for.
    @s3_client.stub_responses(:list_objects_v2, -> (context) do
      { contents: objects_with_prefix(context) }
    end)
  end

  before do
    SiteSetting.enable_s3_uploads = true
    SiteSetting.s3_access_key_id = "abc"
    SiteSetting.s3_secret_access_key = "def"
    SiteSetting.s3_backup_bucket = bucket_name
    SiteSetting.backup_location = BackupLocationSiteSetting::S3
  end

  it "copies the backups from legacy path to new path" do
    @objects.each do |object|
      legacy_key = object[:key]
      legacy_object = @s3_client.get_object(bucket: bucket_name, key: legacy_key)

      # Expect each legacy object to be copied to the same key minus the
      # leading "backups/" segment...
      @s3_client.expects(:copy_object).with({
        copy_source: File.join(bucket_name, legacy_key),
        bucket: bucket_name,
        key: legacy_key.sub(/^backups\//, "")
      })

      # ...and the legacy object to be removed afterwards.
      @s3_client.expects(:delete_object).with(bucket: bucket_name, key: legacy_key).returns(legacy_object)
    end

    described_class.new.execute_onceoff(s3_options: @s3_options)
  end

  # Mirrors S3's ListObjectsV2 prefix filtering over the fixture list.
  def objects_with_prefix(context)
    prefix = context.params[:prefix]
    @objects.select { |obj| obj[:key].start_with?(prefix) }
  end

  # Fixtures in the LEGACY layout ("backups/<db>/<file>") that the job
  # is expected to relocate.
  def create_backups
    @objects.clear

    @objects << { key: "backups/default/b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
    @objects << { key: "backups/default/filename.tar.gz", size: 17, last_modified: Time.parse("2019-10-18T17:20:00Z") }
  end
end
|
|
@ -13,7 +13,7 @@ describe BackupRestore::S3BackupStore do
|
|||
@objects = []
|
||||
|
||||
# With the new layout the db name is always part of the prefix,
# regardless of multisite.
def expected_prefix
  "#{RailsMultisite::ConnectionManagement.current_db}/"
end
|
||||
|
||||
def check_context(context)
|
||||
|
@ -102,26 +102,21 @@ describe BackupRestore::S3BackupStore do
|
|||
|
||||
# Returns the stubbed objects whose keys start with the requested prefix,
# mirroring S3 ListObjectsV2 prefix filtering over the fixture list.
def objects_with_prefix(context)
  wanted = context.params[:prefix]
  @objects.select { |object| object[:key].start_with?(wanted) }
end
|
||||
|
||||
# Seeds the stubbed object list with fixtures for two databases
# ("default" and "second"), already stored in the new per-db layout.
def create_backups
  fixtures = [
    ["default/b.tar.gz", 17, "2018-09-13T15:10:00Z"],
    ["default/a.tgz", 29, "2018-02-11T09:27:00Z"],
    ["default/r.sql.gz", 11, "2017-12-20T03:48:00Z"],
    ["default/no-backup.txt", 12, "2018-09-05T14:27:00Z"],
    ["default/subfolder/c.tar.gz", 23, "2019-01-24T18:44:00Z"],
    ["second/multi-2.tar.gz", 19, "2018-11-27T03:16:54Z"],
    ["second/multi-1.tar.gz", 22, "2018-11-26T03:17:09Z"],
    ["second/subfolder/multi-3.tar.gz", 23, "2019-01-24T18:44:00Z"]
  ]

  @objects.clear
  fixtures.each do |key, size, modified|
    @objects << { key: key, size: size, last_modified: Time.parse(modified) }
  end
end
|
||||
|
||||
def remove_backups
|
||||
|
|
Loading…
Reference in New Issue