diff --git a/lib/file_store/s3_store.rb b/lib/file_store/s3_store.rb
index 9b27c7e2dbf..38c18432250 100644
--- a/lib/file_store/s3_store.rb
+++ b/lib/file_store/s3_store.rb
@@ -43,6 +43,7 @@ module FileStore
       cache_file(file, File.basename(path)) if opts[:cache_locally]
       options = {
         acl: opts[:private] ? "private" : "public-read",
+        cache_control: 'max-age=31556952, public, immutable',
         content_type: opts[:content_type].presence || MiniMime.lookup_by_filename(filename)&.content_type
       }
       # add a "content disposition" header for "attachments"
diff --git a/lib/tasks/s3.rake b/lib/tasks/s3.rake
index 28b0fd62cfe..7913df4bc0a 100644
--- a/lib/tasks/s3.rake
+++ b/lib/tasks/s3.rake
@@ -140,6 +140,50 @@ task 's3:correct_acl' => :environment do
 end
+
+task 's3:correct_cachecontrol' => :environment do
+  ensure_s3_configured!
+
+  puts "ensuring cache-control is set on every upload and optimized image"
+
+  i = 0
+
+  base_url = Discourse.store.absolute_base_url
+
+  acl = SiteSetting.prevent_anons_from_downloading_files ? 'private' : 'public-read'
+  cache_control = 'max-age=31556952, public, immutable'
+
+  objects = Upload.pluck(:id, :url).map { |array| array << :upload }
+  objects.concat(OptimizedImage.pluck(:id, :url).map { |array| array << :optimized_image })
+
+  puts "#{objects.length} objects found"
+
+  objects.each do |id, url, type|
+    i += 1
+    if !url.start_with?(base_url)
+      puts "Skipping #{type} #{id} since it is not stored on s3, url is #{url}"
+    else
+      begin
+        key = url[(base_url.length + 1)..-1]
+        object = Discourse.store.s3_helper.object(key)
+        object.copy_from(
+          copy_source: "#{object.bucket_name}/#{object.key}",
+          acl: acl,
+          cache_control: cache_control,
+          content_type: object.content_type,
+          content_disposition: object.content_disposition,
+          metadata_directive: 'REPLACE'
+        )
+      rescue => e
+        puts "Skipping #{type} #{id} url is #{url} #{e}"
+      end
+    end
+    if i % 100 == 0
+      puts "#{i} done"
+    end
+  end
+
+end
 task 's3:upload_assets' => :environment do
   ensure_s3_configured!