FIX: S3 uploads were missing a cache-control header (#7902)

Admins still need to run the rake task to fix the files that were uploaded previously.
This commit is contained in:
Rafael dos Santos Silva 2019-08-06 14:55:17 -03:00 committed by GitHub
parent 4b9d35cd0e
commit 606c0ed14d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 45 additions and 0 deletions

View File

@ -43,6 +43,7 @@ module FileStore
cache_file(file, File.basename(path)) if opts[:cache_locally]
options = {
acl: opts[:private] ? "private" : "public-read",
cache_control: 'max-age=31556952, public, immutable',
content_type: opts[:content_type].presence || MiniMime.lookup_by_filename(filename)&.content_type
}
# add a "content disposition" header for "attachments"

View File

@ -140,6 +140,50 @@ task 's3:correct_acl' => :environment do
end
# Re-applies the Cache-Control (and ACL / content headers) to every upload and
# optimized image already stored on S3, via an in-place self-copy with
# metadata_directive: 'REPLACE'. Objects not hosted on S3 are skipped, as is
# any object whose copy raises (the error is reported and iteration continues).
task 's3:correct_cachecontrol' => :environment do
  ensure_s3_configured!

  puts "ensuring cache-control is set on every upload and optimized image"

  base_url = Discourse.store.absolute_base_url
  # Site-wide privacy setting decides the ACL applied to every object.
  acl = SiteSetting.prevent_anons_from_downloading_files ? 'private' : 'public-read'
  cache_control = 'max-age=31556952, public, immutable'

  # Build [id, url, type] triples for both record kinds.
  records = Upload.pluck(:id, :url).map { |id, url| [id, url, :upload] }
  records.concat(OptimizedImage.pluck(:id, :url).map { |id, url| [id, url, :optimized_image] })

  puts "#{records.length} objects found"

  records.each.with_index(1) do |(id, url, type), i|
    if url.start_with?(base_url)
      begin
        # Strip "<base_url>/" to recover the S3 object key.
        key = url[(base_url.length + 1)..-1]
        object = Discourse.store.s3_helper.object(key)
        # Copying an object onto itself with REPLACE rewrites its metadata.
        object.copy_from(
          copy_source: "#{object.bucket_name}/#{object.key}",
          acl: acl,
          cache_control: cache_control,
          content_type: object.content_type,
          content_disposition: object.content_disposition,
          metadata_directive: 'REPLACE'
        )
      rescue => e
        puts "Skipping #{type} #{id} url is #{url} #{e}"
      end
    else
      puts "Skipping #{type} #{id} since it is not stored on s3, url is #{url}"
    end

    # Progress counter ticks for skipped objects too.
    puts "#{i} done" if (i % 100).zero?
  end
end
task 's3:upload_assets' => :environment do
ensure_s3_configured!