diff --git a/app/jobs/onceoff/vacate_legacy_prefix_backups.rb b/app/jobs/onceoff/vacate_legacy_prefix_backups.rb
new file mode 100644
index 00000000000..0791a01c0d1
--- /dev/null
+++ b/app/jobs/onceoff/vacate_legacy_prefix_backups.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+module Jobs
+  class VacateLegacyPrefixBackups < ::Jobs::Onceoff
+    def execute_onceoff(args)
+      args ||= {}
+      BackupRestore::S3BackupStore.create(s3_options: args[:s3_options]).vacate_legacy_prefix if SiteSetting.backup_location == BackupLocationSiteSetting::S3
+    end
+  end
+end
diff --git a/lib/backup_restore/s3_backup_store.rb b/lib/backup_restore/s3_backup_store.rb
index 2c1bfc0c6d4..108cb661ba7 100644
--- a/lib/backup_restore/s3_backup_store.rb
+++ b/lib/backup_restore/s3_backup_store.rb
@@ -3,12 +3,11 @@
 module BackupRestore
   class S3BackupStore < BackupStore
     UPLOAD_URL_EXPIRES_AFTER_SECONDS ||= 21_600 # 6 hours
-    MULTISITE_PREFIX = "backups"
 
     def initialize(opts = {})
-      s3_options = S3Helper.s3_options(SiteSetting)
-      s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
-      @s3_helper = S3Helper.new(s3_bucket_name_with_prefix, '', s3_options)
+      @s3_options = S3Helper.s3_options(SiteSetting)
+      @s3_options.merge!(opts[:s3_options]) if opts[:s3_options]
+      @s3_helper = S3Helper.new(s3_bucket_name_with_prefix, '', @s3_options.clone)
     end
 
     def remote?
@@ -52,6 +51,20 @@ module BackupRestore
       raise StorageError
     end
 
+    def vacate_legacy_prefix
+      legacy_s3_helper = S3Helper.new(s3_bucket_name_with_legacy_prefix, '', @s3_options.clone)
+      legacy_keys = legacy_s3_helper.list.map { |o| o.key }
+      legacy_keys.each do |legacy_key|
+        bucket, prefix = s3_bucket_name_with_prefix.split('/', 2)
+        @s3_helper.s3_client.copy_object({
+          copy_source: File.join(bucket, legacy_key),
+          bucket: bucket,
+          key: File.join(prefix, legacy_key.split('/').last)
+        })
+        legacy_s3_helper.delete_object(legacy_key)
+      end
+    end
+
     private
 
     def unsorted_files
@@ -99,8 +112,12 @@ module BackupRestore
     end
 
     def s3_bucket_name_with_prefix
+      File.join(SiteSetting.s3_backup_bucket, RailsMultisite::ConnectionManagement.current_db)
+    end
+
+    def s3_bucket_name_with_legacy_prefix
       if Rails.configuration.multisite
-        File.join(SiteSetting.s3_backup_bucket, MULTISITE_PREFIX, RailsMultisite::ConnectionManagement.current_db)
+        File.join(SiteSetting.s3_backup_bucket, "backups", RailsMultisite::ConnectionManagement.current_db)
       else
         SiteSetting.s3_backup_bucket
       end
diff --git a/lib/s3_helper.rb b/lib/s3_helper.rb
index 8021c781858..865e9b4e6f1 100644
--- a/lib/s3_helper.rb
+++ b/lib/s3_helper.rb
@@ -63,7 +63,12 @@ class S3Helper
 
     # delete the file
     s3_filename.prepend(multisite_upload_path) if Rails.configuration.multisite
-    s3_bucket.object(get_path_for_s3_upload(s3_filename)).delete
+    delete_object(get_path_for_s3_upload(s3_filename))
+  rescue Aws::S3::Errors::NoSuchKey
+  end
+
+  def delete_object(key)
+    s3_bucket.object(key).delete
   rescue Aws::S3::Errors::NoSuchKey
   end
 
diff --git a/spec/jobs/vacate_legacy_prefix_backups_spec.rb b/spec/jobs/vacate_legacy_prefix_backups_spec.rb
new file mode 100644
index 00000000000..dc759792b50
--- /dev/null
+++ b/spec/jobs/vacate_legacy_prefix_backups_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require "s3_helper"
+require 'rails_helper'
+
+describe Jobs::VacateLegacyPrefixBackups, type: :multisite do
+  let(:bucket_name) { "backupbucket" }
+
+  before(:all) do
+    @s3_client = Aws::S3::Client.new(stub_responses: true)
+    @s3_options = { client: @s3_client }
+    @objects = []
+    create_backups
+
+    @s3_client.stub_responses(:list_objects_v2, -> (context) do
+      { contents: objects_with_prefix(context) }
+    end)
+  end
+
+  before do
+    SiteSetting.enable_s3_uploads = true
+    SiteSetting.s3_access_key_id = "abc"
+    SiteSetting.s3_secret_access_key = "def"
+    SiteSetting.s3_backup_bucket = bucket_name
+    SiteSetting.backup_location = BackupLocationSiteSetting::S3
+  end
+
+  it "copies the backups from legacy path to new path" do
+    @objects.each do |object|
+      legacy_key = object[:key]
+      legacy_object = @s3_client.get_object(bucket: bucket_name, key: legacy_key)
+
+      @s3_client.expects(:copy_object).with({
+        copy_source: File.join(bucket_name, legacy_key),
+        bucket: bucket_name,
+        key: legacy_key.sub(/^backups\//, "")
+      })
+
+      @s3_client.expects(:delete_object).with(bucket: bucket_name, key: legacy_key).returns(legacy_object)
+    end
+
+    described_class.new.execute_onceoff(s3_options: @s3_options)
+  end
+
+  def objects_with_prefix(context)
+    prefix = context.params[:prefix]
+    @objects.select { |obj| obj[:key].start_with?(prefix) }
+  end
+
+  def create_backups
+    @objects.clear
+
+    @objects << { key: "backups/default/b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
+    @objects << { key: "backups/default/filename.tar.gz", size: 17, last_modified: Time.parse("2019-10-18T17:20:00Z") }
+  end
+end
diff --git a/spec/lib/backup_restore/s3_backup_store_spec.rb b/spec/lib/backup_restore/s3_backup_store_spec.rb
index af5a7ca19cf..5f6745ff15a 100644
--- a/spec/lib/backup_restore/s3_backup_store_spec.rb
+++ b/spec/lib/backup_restore/s3_backup_store_spec.rb
@@ -13,7 +13,7 @@ describe BackupRestore::S3BackupStore do
     @objects = []
 
     def expected_prefix
-      Rails.configuration.multisite ? "backups/#{RailsMultisite::ConnectionManagement.current_db}/" : ""
+      "#{RailsMultisite::ConnectionManagement.current_db}/"
     end
 
     def check_context(context)
@@ -102,26 +102,21 @@ describe BackupRestore::S3BackupStore do
     def objects_with_prefix(context)
       prefix = context.params[:prefix]
-
-      if prefix.blank?
-        @objects.reject { |obj| obj[:key].include?("backups/") }
-      else
-        @objects.select { |obj| obj[:key].start_with?(prefix) }
-      end
+      @objects.select { |obj| obj[:key].start_with?(prefix) }
     end
 
     def create_backups
       @objects.clear
 
-      @objects << { key: "b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
-      @objects << { key: "a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z") }
-      @objects << { key: "r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z") }
-      @objects << { key: "no-backup.txt", size: 12, last_modified: Time.parse("2018-09-05T14:27:00Z") }
-      @objects << { key: "subfolder/c.tar.gz", size: 23, last_modified: Time.parse("2019-01-24T18:44:00Z") }
+      @objects << { key: "default/b.tar.gz", size: 17, last_modified: Time.parse("2018-09-13T15:10:00Z") }
+      @objects << { key: "default/a.tgz", size: 29, last_modified: Time.parse("2018-02-11T09:27:00Z") }
+      @objects << { key: "default/r.sql.gz", size: 11, last_modified: Time.parse("2017-12-20T03:48:00Z") }
+      @objects << { key: "default/no-backup.txt", size: 12, last_modified: Time.parse("2018-09-05T14:27:00Z") }
+      @objects << { key: "default/subfolder/c.tar.gz", size: 23, last_modified: Time.parse("2019-01-24T18:44:00Z") }
 
-      @objects << { key: "backups/second/multi-2.tar.gz", size: 19, last_modified: Time.parse("2018-11-27T03:16:54Z") }
-      @objects << { key: "backups/second/multi-1.tar.gz", size: 22, last_modified: Time.parse("2018-11-26T03:17:09Z") }
-      @objects << { key: "backups/second/subfolder/multi-3.tar.gz", size: 23, last_modified: Time.parse("2019-01-24T18:44:00Z") }
+      @objects << { key: "second/multi-2.tar.gz", size: 19, last_modified: Time.parse("2018-11-27T03:16:54Z") }
+      @objects << { key: "second/multi-1.tar.gz", size: 22, last_modified: Time.parse("2018-11-26T03:17:09Z") }
+      @objects << { key: "second/subfolder/multi-3.tar.gz", size: 23, last_modified: Time.parse("2019-01-24T18:44:00Z") }
     end
 
     def remove_backups