diff --git a/app/controllers/uploads_controller.rb b/app/controllers/uploads_controller.rb
index 142b6a00b8b..b95b33b5c04 100644
--- a/app/controllers/uploads_controller.rb
+++ b/app/controllers/uploads_controller.rb
@@ -224,22 +224,15 @@ class UploadsController < ApplicationController
       )
     end
 
-    metadata = parse_allowed_metadata(params[:metadata])
-
-    url = Discourse.store.signed_url_for_temporary_upload(
-      file_name, metadata: metadata
-    )
-    key = Discourse.store.path_from_url(url)
-
-    upload_stub = ExternalUploadStub.create!(
-      key: key,
-      created_by: current_user,
-      original_filename: file_name,
+    external_upload_data = ExternalUploadManager.create_direct_upload(
+      current_user: current_user,
+      file_name: file_name,
+      file_size: file_size,
       upload_type: type,
-      filesize: file_size
+      metadata: parse_allowed_metadata(params[:metadata])
     )
 
-    render json: { url: url, key: key, unique_identifier: upload_stub.unique_identifier }
+    render json: external_upload_data
   end
 
   def complete_external_upload
@@ -307,7 +300,6 @@ class UploadsController < ApplicationController
     file_name = params.require(:file_name)
     file_size = params.require(:file_size).to_i
     upload_type = params.require(:upload_type)
-    content_type = MiniMime.lookup_by_filename(file_name)&.content_type
 
     if file_size_too_big?(file_name, file_size)
       return render_json_error(
@@ -316,11 +308,13 @@ class UploadsController < ApplicationController
       )
     end
 
-    metadata = parse_allowed_metadata(params[:metadata])
-
     begin
-      multipart_upload = Discourse.store.create_multipart(
-        file_name, content_type, metadata: metadata
+      external_upload_data = ExternalUploadManager.create_direct_multipart_upload(
+        current_user: current_user,
+        file_name: file_name,
+        file_size: file_size,
+        upload_type: upload_type,
+        metadata: parse_allowed_metadata(params[:metadata])
       )
     rescue Aws::S3::Errors::ServiceError => err
       return render_json_error(
@@ -329,21 +323,7 @@ class UploadsController < ApplicationController
       )
     end
 
-    upload_stub = ExternalUploadStub.create!(
-      key: multipart_upload[:key],
-      created_by: current_user,
-      original_filename: file_name,
-      upload_type: upload_type,
-      external_upload_identifier: multipart_upload[:upload_id],
-      multipart: true,
-      filesize: file_size
-    )
-
-    render json: {
-      external_upload_identifier: upload_stub.external_upload_identifier,
-      key: upload_stub.key,
-      unique_identifier: upload_stub.unique_identifier
-    }
+    render json: external_upload_data
   end
 
   def batch_presign_multipart_parts
diff --git a/app/services/external_upload_manager.rb b/app/services/external_upload_manager.rb
index e5c032b4682..d4357238489 100644
--- a/app/services/external_upload_manager.rb
+++ b/app/services/external_upload_manager.rb
@@ -20,6 +20,48 @@ class ExternalUploadManager
     Discourse.redis.get("#{BAN_USER_REDIS_PREFIX}#{user.id}") == "1"
   end
 
+  def self.create_direct_upload(current_user:, file_name:, file_size:, upload_type:, metadata: {})
+    url = Discourse.store.signed_url_for_temporary_upload(
+      file_name, metadata: metadata
+    )
+    key = Discourse.store.path_from_url(url)
+
+    upload_stub = ExternalUploadStub.create!(
+      key: key,
+      created_by: current_user,
+      original_filename: file_name,
+      upload_type: upload_type,
+      filesize: file_size
+    )
+
+    { url: url, key: key, unique_identifier: upload_stub.unique_identifier }
+  end
+
+  def self.create_direct_multipart_upload(
+    current_user:, file_name:, file_size:, upload_type:, metadata: {}
+  )
+    content_type = MiniMime.lookup_by_filename(file_name)&.content_type
+    multipart_upload = Discourse.store.create_multipart(
+      file_name, content_type, metadata: metadata
+    )
+
+    upload_stub = ExternalUploadStub.create!(
+      key: multipart_upload[:key],
+      created_by: current_user,
+      original_filename: file_name,
+      upload_type: upload_type,
+      external_upload_identifier: multipart_upload[:upload_id],
+      multipart: true,
+      filesize: file_size
+    )
+
+    {
+      external_upload_identifier: upload_stub.external_upload_identifier,
+      key: upload_stub.key,
+      unique_identifier: upload_stub.unique_identifier
+    }
+  end
+
   def initialize(external_upload_stub, upload_create_opts = {})
     @external_upload_stub = external_upload_stub
     @upload_create_opts = upload_create_opts
diff --git a/lib/backup_restore/s3_backup_store.rb b/lib/backup_restore/s3_backup_store.rb
index cb2fcd72dea..016a2bf35e9 100644
--- a/lib/backup_restore/s3_backup_store.rb
+++ b/lib/backup_restore/s3_backup_store.rb
@@ -44,7 +44,11 @@ module BackupRestore
       obj = @s3_helper.object(filename)
       raise BackupFileExists.new if obj.exists?
 
-      ensure_cors!
+      # TODO (martin): We can remove this at a later date when we move this
+      # ensure-CORS step for backups and direct uploads into a post-site-setting
+      # change event, so the rake task doesn't have to be run manually.
+      @s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
+
       presigned_url(obj, :put, UPLOAD_URL_EXPIRES_AFTER_SECONDS)
     rescue Aws::Errors::ServiceError => e
       Rails.logger.warn("Failed to generate upload URL for S3: #{e.message.presence || e.class.name}")
@@ -82,10 +86,6 @@ module BackupRestore
       obj.presigned_url(method, expires_in: expires_in_seconds)
     end
 
-    def ensure_cors!
-      @s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
-    end
-
     def cleanup_allowed?
       !SiteSetting.s3_disable_cleanup
     end
diff --git a/lib/s3_cors_rulesets.rb b/lib/s3_cors_rulesets.rb
index 88f76f33527..d982d18d0f4 100644
--- a/lib/s3_cors_rulesets.rb
+++ b/lib/s3_cors_rulesets.rb
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require_dependency "s3_helper"
+
 class S3CorsRulesets
   ASSETS = {
     allowed_headers: ["Authorization"],
@@ -10,8 +12,59 @@ class S3CorsRulesets
 
   BACKUP_DIRECT_UPLOAD = {
     allowed_headers: ["*"],
-    allowed_methods: ["PUT"],
-    allowed_origins: [Discourse.base_url_no_prefix],
+    expose_headers: ["ETag"],
+    allowed_methods: ["GET", "HEAD", "PUT"],
+    allowed_origins: ["*"],
     max_age_seconds: 3000
   }.freeze
+
+  DIRECT_UPLOAD = {
+    allowed_headers: ["Authorization", "Content-Disposition", "Content-Type"],
+    expose_headers: ["ETag"],
+    allowed_methods: ["GET", "HEAD", "PUT"],
+    allowed_origins: ["*"],
+    max_age_seconds: 3000
+  }.freeze
+
+  ##
+  # Used by the s3:ensure_cors_rules rake task to make sure the
+  # relevant CORS rules are applied to allow for direct uploads to
+  # S3 and, in the case of the assets rules, so that fonts and other
+  # public assets for the site load correctly.
+  #
+  # The use_db_s3_config param comes from ENV, and if the S3 client
+  # is not provided it is initialized by the S3Helper.
+  def self.sync(use_db_s3_config:, s3_client: nil)
+    return if !SiteSetting.s3_install_cors_rule
+    return if !(GlobalSetting.use_s3? || SiteSetting.enable_s3_uploads)
+
+    assets_rules_applied = false
+    backup_rules_applied = false
+    direct_upload_rules_applied = false
+
+    s3_helper = S3Helper.build_from_config(
+      s3_client: s3_client, use_db_s3_config: use_db_s3_config
+    )
+    puts "Attempting to apply ASSETS S3 CORS ruleset in bucket #{s3_helper.s3_bucket_name}."
+    assets_rules_applied = s3_helper.ensure_cors!([S3CorsRulesets::ASSETS])
+
+    if SiteSetting.enable_backups? && SiteSetting.backup_location == BackupLocationSiteSetting::S3
+      backup_s3_helper = S3Helper.build_from_config(
+        s3_client: s3_client, use_db_s3_config: use_db_s3_config, for_backup: true
+      )
+      puts "Attempting to apply BACKUP_DIRECT_UPLOAD S3 CORS ruleset in bucket #{backup_s3_helper.s3_bucket_name}."
+      backup_rules_applied = backup_s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
+    end
+
+    if SiteSetting.enable_direct_s3_uploads
+      puts "Attempting to apply DIRECT_UPLOAD S3 CORS ruleset in bucket #{s3_helper.s3_bucket_name}."
+      direct_upload_rules_applied = s3_helper.ensure_cors!([S3CorsRulesets::DIRECT_UPLOAD])
+    end
+
+    {
+      assets_rules_applied: assets_rules_applied,
+      backup_rules_applied: backup_rules_applied,
+      direct_upload_rules_applied: direct_upload_rules_applied
+    }
+  end
 end
diff --git a/lib/s3_helper.rb b/lib/s3_helper.rb
index 253269b5d9a..7c5ae18296e 100644
--- a/lib/s3_helper.rb
+++ b/lib/s3_helper.rb
@@ -40,6 +40,21 @@ class S3Helper
     end
   end
 
+  def self.build_from_config(use_db_s3_config: false, for_backup: false, s3_client: nil)
+    setting_klass = use_db_s3_config ? SiteSetting : GlobalSetting
+    options = S3Helper.s3_options(setting_klass)
+    options[:client] = s3_client if s3_client.present?
+
+    bucket =
+      if for_backup
+        setting_klass.s3_backup_bucket
+      else
+        use_db_s3_config ? SiteSetting.s3_upload_bucket : GlobalSetting.s3_bucket
+      end
+
+    S3Helper.new(bucket.downcase, '', options)
+  end
+
   def self.get_bucket_and_folder_path(s3_bucket_name)
     s3_bucket_name.downcase.split("/", 2)
   end
@@ -124,31 +139,36 @@ class S3Helper
     [destination, response.copy_object_result.etag.gsub('"', '')]
   end
 
-  # make sure we have a cors config for assets
-  # otherwise we will have no fonts
+  # Several places in the application need certain CORS rules to exist
+  # inside an S3 bucket so requests to the bucket can be made
+  # directly from the browser. The s3:ensure_cors_rules rake task
+  # is used to ensure these rules exist for assets, S3 backups, and
+  # direct S3 uploads, depending on configuration.
   def ensure_cors!(rules = nil)
     return unless SiteSetting.s3_install_cors_rule
+    rules = [rules] if !rules.is_a?(Array)
+    existing_rules = fetch_bucket_cors_rules
 
-    rule = nil
+    new_rules = rules - existing_rules
+    return false if new_rules.empty?
+
+    final_rules = existing_rules + new_rules
 
     begin
-      rule = s3_resource.client.get_bucket_cors(
-        bucket: @s3_bucket_name
-      ).cors_rules&.first
-    rescue Aws::S3::Errors::NoSuchCORSConfiguration
-      # no rule
-    end
-
-    unless rule
-      rules = [S3CorsRulesets::ASSETS] if rules.nil?
-
       s3_resource.client.put_bucket_cors(
         bucket: @s3_bucket_name,
         cors_configuration: {
-          cors_rules: rules
+          cors_rules: final_rules
         }
       )
+    rescue Aws::S3::Errors::AccessDenied => err
+      # TODO (martin): Remove this warning log level once we are sure this new
+      # ensure_cors! rule is functioning correctly.
+      Discourse.warn_exception(err, message: "Could not PutBucketCors rules for #{@s3_bucket_name}, rules: #{final_rules}")
+      return false
     end
+
+    true
   end
 
   def update_lifecycle(id, days, prefix: nil, tag: nil)
@@ -264,6 +284,17 @@ class S3Helper
 
   private
 
+  def fetch_bucket_cors_rules
+    begin
+      s3_resource.client.get_bucket_cors(
+        bucket: @s3_bucket_name
+      ).cors_rules&.map(&:to_h) || []
+    rescue Aws::S3::Errors::NoSuchCORSConfiguration
+      # no rule
+      []
+    end
+  end
+
   def default_s3_options
     if SiteSetting.enable_s3_uploads?
       options = self.class.s3_options(SiteSetting)
diff --git a/lib/tasks/s3.rake b/lib/tasks/s3.rake
index e775e5c260e..65b738d064b 100644
--- a/lib/tasks/s3.rake
+++ b/lib/tasks/s3.rake
@@ -46,22 +46,7 @@ def use_db_s3_config
 end
 
 def helper
-  @helper ||= begin
-    bucket, options =
-      if use_db_s3_config
-        [
-          SiteSetting.s3_upload_bucket.downcase,
-          S3Helper.s3_options(SiteSetting)
-        ]
-      else
-        [
-          GlobalSetting.s3_bucket.downcase,
-          S3Helper.s3_options(GlobalSetting)
-        ]
-      end
-
-    S3Helper.new(bucket, '', options)
-  end
+  @helper ||= S3Helper.build_from_config(use_db_s3_config: use_db_s3_config)
 end
 
 def assets
@@ -186,12 +171,30 @@ task 's3:correct_cachecontrol' => :environment do
 
 end
 
-task 's3:upload_assets' => :environment do
+task 's3:ensure_cors_rules' => :environment do
   ensure_s3_configured!
 
-  puts "installing CORS rule"
-  helper.ensure_cors!
+  puts "Installing CORS rules..."
+  result = S3CorsRulesets.sync(use_db_s3_config: use_db_s3_config)
 
+  if result[:assets_rules_applied]
+    puts "Assets rules did not exist and were applied."
+  else
+    puts "Assets rules already existed."
+  end
+  if result[:backup_rules_applied]
+    puts "Backup rules did not exist and were applied."
+  else
+    puts "Backup rules already existed."
+  end
+  if result[:direct_upload_rules_applied]
+    puts "Direct upload rules did not exist and were applied."
+  else
+    puts "Direct upload rules already existed."
+  end
+end
+
+task 's3:upload_assets' => [:environment, 's3:ensure_cors_rules'] do
   assets.each do |asset|
     upload(*asset)
   end
diff --git a/spec/components/s3_helper_spec.rb b/spec/components/s3_helper_spec.rb
index 39df6ffeeca..d70164728df 100644
--- a/spec/components/s3_helper_spec.rb
+++ b/spec/components/s3_helper_spec.rb
@@ -147,7 +147,7 @@ describe "S3Helper" do
           cors_rules: [S3CorsRulesets::ASSETS]
         }
       )
-      s3_helper.ensure_cors!
+      s3_helper.ensure_cors!([S3CorsRulesets::ASSETS])
    end
 
    it "does nothing if a rule already exists" do
@@ -155,15 +155,33 @@ describe "S3Helper" do
        cors_rules: [S3CorsRulesets::ASSETS]
      })
 
      s3_helper.s3_client.expects(:put_bucket_cors).never
-      s3_helper.ensure_cors!
+      s3_helper.ensure_cors!([S3CorsRulesets::ASSETS])
    end
 
-    it "does not apply the passed in rules if a different rule already exists" do
+    it "applies the passed in rule if a different rule already exists" do
      s3_helper.s3_client.stub_responses(:get_bucket_cors, {
        cors_rules: [S3CorsRulesets::ASSETS]
      })
 
-      s3_helper.s3_client.expects(:put_bucket_cors).never
+      s3_helper.s3_client.expects(:put_bucket_cors).with(
+        bucket: s3_helper.s3_bucket_name,
+        cors_configuration: {
+          cors_rules: [S3CorsRulesets::ASSETS, S3CorsRulesets::BACKUP_DIRECT_UPLOAD]
+        }
+      )
      s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])
    end
+
+    it "returns false if the CORS rules do not get applied due to an error" do
+      s3_helper.s3_client.stub_responses(:get_bucket_cors, {
+        cors_rules: [S3CorsRulesets::ASSETS]
+      })
+      s3_helper.s3_client.expects(:put_bucket_cors).with(
+        bucket: s3_helper.s3_bucket_name,
+        cors_configuration: {
+          cors_rules: [S3CorsRulesets::ASSETS, S3CorsRulesets::BACKUP_DIRECT_UPLOAD]
+        }
+      ).raises(Aws::S3::Errors::AccessDenied.new("test", "test", {}))
+      expect(s3_helper.ensure_cors!([S3CorsRulesets::BACKUP_DIRECT_UPLOAD])).to eq(false)
+    end
  end
 end
diff --git a/spec/lib/s3_cors_rulesets_spec.rb b/spec/lib/s3_cors_rulesets_spec.rb
new file mode 100644
index 00000000000..cbcd95551a8
--- /dev/null
+++ b/spec/lib/s3_cors_rulesets_spec.rb
@@ -0,0 +1,226 @@
+# frozen_string_literal: true
+
+require "rails_helper"
+require "s3_cors_rulesets"
+
+RSpec.describe S3CorsRulesets do
+  describe "#sync" do
+    let(:use_db_s3_config) { false }
+    let(:client) { Aws::S3::Client.new(stub_responses: true) }
+
+    it "does nothing when S3 is not set up" do
+      client.expects(:get_bucket_cors).never
+      sync_rules
+    end
+
+    context "when S3 is set up with global settings" do
+      let(:use_db_s3_config) { false }
+
+      before do
+        global_setting :s3_use_iam_profile, true
+        global_setting :s3_bucket, "s3-upload-bucket"
+        global_setting :s3_backup_bucket, "s3-backup-upload-bucket"
+        global_setting :s3_region, "us-west-2"
+      end
+
+      it "does nothing if !s3_install_cors_rule" do
+        SiteSetting.s3_install_cors_rule = false
+        client.expects(:get_bucket_cors).never
+        result = sync_rules
+        expect(result).to eq(nil)
+      end
+
+      it "only tries to apply the ASSETS rules by default" do
+        client.stub_responses(:get_bucket_cors, {})
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::ASSETS
+            ]
+          }
+        )
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(true)
+        expect(result[:direct_upload_rules_applied]).to eq(false)
+        expect(result[:backup_rules_applied]).to eq(false)
+      end
+
+      it "does not apply the ASSETS rules if they already exist" do
+        client.stub_responses(:get_bucket_cors, {
+          cors_rules: [S3CorsRulesets::ASSETS]
+        })
+        client.expects(:put_bucket_cors).never
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(false)
+        expect(result[:direct_upload_rules_applied]).to eq(false)
+        expect(result[:backup_rules_applied]).to eq(false)
+      end
+
+      it "applies the ASSETS rules and the BACKUP_DIRECT_UPLOAD rules if S3 backups are enabled" do
+        setup_backups
+
+        client.stub_responses(:get_bucket_cors, {})
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::ASSETS
+            ]
+          }
+        )
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-backup-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::BACKUP_DIRECT_UPLOAD
+            ]
+          }
+        )
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(true)
+        expect(result[:direct_upload_rules_applied]).to eq(false)
+        expect(result[:backup_rules_applied]).to eq(true)
+      end
+
+      it "applies the ASSETS rules and the DIRECT_UPLOAD rules when S3 direct uploads are enabled" do
+        SiteSetting.enable_direct_s3_uploads = true
+
+        client.stub_responses(:get_bucket_cors, {})
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::ASSETS
+            ]
+          }
+        )
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::DIRECT_UPLOAD
+            ]
+          }
+        )
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(true)
+        expect(result[:direct_upload_rules_applied]).to eq(true)
+        expect(result[:backup_rules_applied]).to eq(false)
+      end
+
+      it "makes no changes if all the rules already exist" do
+        SiteSetting.enable_direct_s3_uploads = true
+        setup_backups
+
+        client.stub_responses(:get_bucket_cors, {
+          cors_rules: [S3CorsRulesets::ASSETS, S3CorsRulesets::BACKUP_DIRECT_UPLOAD, S3CorsRulesets::DIRECT_UPLOAD]
+        })
+        client.expects(:put_bucket_cors).never
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(false)
+        expect(result[:direct_upload_rules_applied]).to eq(false)
+        expect(result[:backup_rules_applied]).to eq(false)
+      end
+
+      def setup_backups
+        SiteSetting.enable_backups = true
+        SiteSetting.s3_backup_bucket = "s3-backup-upload-bucket"
+        SiteSetting.s3_use_iam_profile = true
+        SiteSetting.backup_location = BackupLocationSiteSetting::S3
+      end
+    end
+
+    context "when S3 is set up with database settings" do
+      let(:use_db_s3_config) { true }
+
+      before do
+        setup_s3
+      end
+
+      it "only tries to apply the ASSETS rules by default" do
+        client.stub_responses(:get_bucket_cors, {})
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::ASSETS
+            ]
+          }
+        )
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(true)
+        expect(result[:direct_upload_rules_applied]).to eq(false)
+        expect(result[:backup_rules_applied]).to eq(false)
+      end
+
+      it "does not apply the ASSETS rules if they already exist" do
+        client.stub_responses(:get_bucket_cors, {
+          cors_rules: [S3CorsRulesets::ASSETS]
+        })
+        client.expects(:put_bucket_cors).never
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(false)
+        expect(result[:direct_upload_rules_applied]).to eq(false)
+        expect(result[:backup_rules_applied]).to eq(false)
+      end
+
+      it "applies the ASSETS rules and the BACKUP_DIRECT_UPLOAD rules if S3 backups are enabled" do
+        SiteSetting.enable_backups = true
+        SiteSetting.s3_backup_bucket = "s3-backup-upload-bucket"
+        SiteSetting.backup_location = BackupLocationSiteSetting::S3
+
+        client.stub_responses(:get_bucket_cors, {})
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::ASSETS
+            ]
+          }
+        )
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-backup-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::BACKUP_DIRECT_UPLOAD
+            ]
+          }
+        )
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(true)
+        expect(result[:direct_upload_rules_applied]).to eq(false)
+        expect(result[:backup_rules_applied]).to eq(true)
+      end
+
+      it "applies the ASSETS rules and the DIRECT_UPLOAD rules when S3 direct uploads are enabled" do
+        SiteSetting.enable_direct_s3_uploads = true
+
+        client.stub_responses(:get_bucket_cors, {})
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::ASSETS
+            ]
+          }
+        )
+        client.expects(:put_bucket_cors).with(
+          bucket: "s3-upload-bucket",
+          cors_configuration: {
+            cors_rules: [
+              S3CorsRulesets::DIRECT_UPLOAD
+            ]
+          }
+        )
+        result = sync_rules
+        expect(result[:assets_rules_applied]).to eq(true)
+        expect(result[:direct_upload_rules_applied]).to eq(true)
+        expect(result[:backup_rules_applied]).to eq(false)
+      end
+    end
+  end
+
+  def sync_rules
+    described_class.sync(use_db_s3_config: use_db_s3_config, s3_client: client)
+  end
+end
diff --git a/spec/multisite/s3_store_spec.rb b/spec/multisite/s3_store_spec.rb
index 17eb47f5557..1b485dcdd15 100644
--- a/spec/multisite/s3_store_spec.rb
+++ b/spec/multisite/s3_store_spec.rb
@@ -184,15 +184,16 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
     test_multisite_connection('default') do
       upload = Fabricate(:upload, original_filename: "small.pdf", extension: "pdf", secure: true)
+      path = Discourse.store.get_path_for_upload(upload)
+
       s3_helper.expects(:s3_bucket).returns(s3_bucket).at_least_once
-      s3_bucket.expects(:object).with("#{upload_path}/original/1X/#{upload.sha1}.pdf").returns(s3_object).at_least_once
+      s3_bucket.expects(:object).with("#{upload_path}/#{path}").returns(s3_object).at_least_once
       s3_object.expects(:presigned_url).with(:get, expires_in: S3Helper::DOWNLOAD_URL_EXPIRES_AFTER_SECONDS)
 
       upload.url = store.store_upload(uploaded_file, upload)
       expect(upload.url).to eq(
-        "//some-really-cool-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/original/1X/#{upload.sha1}.pdf"
+        "//some-really-cool-bucket.s3.dualstack.us-west-1.amazonaws.com/#{upload_path}/#{path}"
       )
 
-      expect(store.url_for(upload)).not_to eq(upload.url)
     end
   end
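
Usage sketch (reviewer note, not part of the patch): the calls below are illustrative only and assume a site where S3 uploads are already configured and s3_install_cors_rule is enabled; the user record, filename, and upload_type value are made up for the example.

    # Rake entry point added above; ENV decides DB vs. global S3 settings:
    #   bin/rake s3:ensure_cors_rules
    #
    # The same sync can be exercised from a Rails console. It returns a hash of
    # booleans indicating which rulesets were missing and had to be installed:
    result = S3CorsRulesets.sync(use_db_s3_config: false)
    result[:assets_rules_applied]        # true when the ASSETS ruleset had to be applied
    result[:backup_rules_applied]        # only relevant when S3 backups are enabled
    result[:direct_upload_rules_applied] # only relevant when enable_direct_s3_uploads is on

    # The controller now delegates external upload stub creation to the manager:
    data = ExternalUploadManager.create_direct_upload(
      current_user: user,      # hypothetical User record
      file_name: "avatar.png", # hypothetical filename
      file_size: 1024,
      upload_type: "avatar"
    )
    data[:url]               # presigned PUT URL for the temporary S3 object
    data[:unique_identifier] # identifier the client echoes back when completing the upload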