# frozen_string_literal: true

def brotli_s3_path(path)
  ext = File.extname(path)
  "#{path[0..-ext.length]}br#{ext}"
end

def gzip_s3_path(path)
  ext = File.extname(path)
  "#{path[0..-ext.length]}gz#{ext}"
end
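
# For reference, these helpers rewrite a compiled asset path to its compressed
# variant, keeping the original extension last. Hypothetical values for illustration:
#   brotli_s3_path("assets/app.js") # => "assets/app.br.js"
#   gzip_s3_path("assets/app.js")   # => "assets/app.gz.js"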

def existing_assets
  @existing_assets ||= Set.new(helper.list("assets/").map(&:key))
end

def prefix_s3_path(path)
  path = File.join(helper.s3_bucket_folder_path, path) if helper.s3_bucket_folder_path
  path
end
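
# Illustration only (assumes a bucket folder path of "my-folder" is configured):
#   prefix_s3_path("assets/app.js") # => "my-folder/assets/app.js"
# With no bucket folder path configured, the path is returned unchanged.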

def should_skip?(path)
  return false if ENV["FORCE_S3_UPLOADS"]
  existing_assets.include?(prefix_s3_path(path))
end

def upload(path, remote_path, content_type, content_encoding = nil)
  options = {
    cache_control: "max-age=31556952, public, immutable",
    content_type: content_type,
    acl: SiteSetting.s3_use_acls ? "public-read" : nil,
  }

  options[:content_encoding] = content_encoding if content_encoding

  if should_skip?(remote_path)
    puts "Skipping: #{remote_path}"
  else
    puts "Uploading: #{remote_path}"
    File.open(path) { |file| helper.upload(file, remote_path, options) }
  end

  File.delete(path) if File.exist?(path) && ENV["DELETE_ASSETS_AFTER_S3_UPLOAD"]
end
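
# A minimal usage sketch (hypothetical paths and content type, not from the original):
#   upload("#{Rails.root}/public/assets/app.js", "assets/app.js", "application/javascript")
#   upload("#{Rails.root}/public/assets/app.js.br", "assets/app.br.js", "application/javascript", "br")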

def use_db_s3_config
  ENV["USE_DB_S3_CONFIG"]
end

def helper
  @helper ||= S3Helper.build_from_config(use_db_s3_config: use_db_s3_config)
end

def assets
  cached = Rails.application.assets&.cached
  manifest =
    Sprockets::Manifest.new(
      cached,
      Rails.root + "public/assets",
      Rails.application.config.assets.manifest,
    )

  results = []

  manifest.assets.each do |_, path|
    fullpath = (Rails.root + "public/assets/#{path}").to_s

    # Ignore files we can't find the mime type of, like yarn.lock
    content_type = MiniMime.lookup_by_filename(fullpath)&.content_type
    content_type ||= "application/json" if fullpath.end_with?(".map")
    if content_type
      asset_path = "assets/#{path}"
      results << [fullpath, asset_path, content_type]

      if File.exist?(fullpath + ".br")
        results << [fullpath + ".br", brotli_s3_path(asset_path), content_type, "br"]
      end

      if File.exist?(fullpath + ".gz")
        results << [fullpath + ".gz", gzip_s3_path(asset_path), content_type, "gzip"]
      end

      if File.exist?(fullpath + ".map")
        results << [fullpath + ".map", asset_path + ".map", "application/json"]
      end
    end
  end

  results
end
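
# Each entry returned above is [local_fullpath, remote_path, content_type], with an
# optional fourth element for content_encoding ("br" or "gzip"), matching upload's signature.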

def asset_paths
  Set.new(assets.map { |_, asset_path| asset_path })
end

def ensure_s3_configured!
  unless GlobalSetting.use_s3? || use_db_s3_config
    STDERR.puts "ERROR: Ensure S3 is configured in config/discourse.conf or environment vars"
    exit 1
  end
end
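
# The tasks below are invoked via Rake, e.g. (assuming Discourse's usual binstub setup):
#   bin/rake s3:upload_assets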

task "s3:correct_acl" => :environment do
  ensure_s3_configured!

  if !SiteSetting.s3_use_acls
    $stderr.puts "Not correcting ACLs as the site is configured to not use ACLs"
    next
  end

  puts "ensuring public-read is set on every upload and optimized image"

  i = 0

  base_url = Discourse.store.absolute_base_url

  objects = Upload.pluck(:id, :url).map { |array| array << :upload }
  objects.concat(OptimizedImage.pluck(:id, :url).map { |array| array << :optimized_image })

  puts "#{objects.length} objects found"

  objects.each do |id, url, type|
    i += 1
    if !url.start_with?(base_url)
      puts "Skipping #{type} #{id} since it is not stored on s3, url is #{url}"
    else
      begin
        key = url[(base_url.length + 1)..-1]
        object = Discourse.store.s3_helper.object(key)
        object.acl.put(acl: "public-read")
      rescue => e
        puts "Skipping #{type} #{id} url is #{url} #{e}"
      end
    end
    puts "#{i} done" if i % 100 == 0
  end
end

task "s3:correct_cachecontrol" => :environment do
  ensure_s3_configured!

  puts "ensuring cache-control is set on every upload and optimized image"

  i = 0

  base_url = Discourse.store.absolute_base_url

  cache_control = "max-age=31556952, public, immutable"

  objects = Upload.pluck(:id, :url).map { |array| array << :upload }
  objects.concat(OptimizedImage.pluck(:id, :url).map { |array| array << :optimized_image })

  puts "#{objects.length} objects found"

  objects.each do |id, url, type|
    i += 1
    if !url.start_with?(base_url)
      puts "Skipping #{type} #{id} since it is not stored on s3, url is #{url}"
    else
      begin
        key = url[(base_url.length + 1)..-1]
        object = Discourse.store.s3_helper.object(key)
        object.copy_from(
          copy_source: "#{object.bucket_name}/#{object.key}",
          acl: SiteSetting.s3_use_acls ? "public-read" : nil,
          cache_control: cache_control,
          content_type: object.content_type,
          content_disposition: object.content_disposition,
          metadata_directive: "REPLACE",
        )
      rescue => e
        puts "Skipping #{type} #{id} url is #{url} #{e}"
      end
    end
    puts "#{i} done" if i % 100 == 0
  end
end

task "s3:ensure_cors_rules" => :environment do
  ensure_s3_configured!

  puts "Installing CORS rules..."
  result = S3CorsRulesets.sync(use_db_s3_config: use_db_s3_config)

  if !result
    puts "skipping"
    next
  end

  puts "Assets rules status: #{result[:assets_rules_status]}."
  puts "Backup rules status: #{result[:backup_rules_status]}."
  puts "Direct upload rules status: #{result[:direct_upload_rules_status]}."
end

task "s3:upload_assets" => [:environment, "s3:ensure_cors_rules"] do
  assets.each { |asset| upload(*asset) }
end
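
# Note: upload(*asset) splats each tuple produced by `assets` into
# upload(path, remote_path, content_type, content_encoding).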

task "s3:expire_missing_assets" => :environment do
  ensure_s3_configured!

  puts "Checking for stale S3 assets..."

  if Discourse.readonly_mode?
    puts "Discourse is in readonly mode. Skipping s3 asset deletion in case this is a read-only mirror of a live site."
    exit 0
  end

  assets_to_delete = existing_assets.dup

  # Check that all current assets are uploaded, and remove them from the to_delete list
  asset_paths.each do |current_asset_path|
    uploaded = assets_to_delete.delete?(prefix_s3_path(current_asset_path))
    if !uploaded
      puts "A current asset does not exist on S3 (#{current_asset_path}). Aborting cleanup task."
      exit 1
    end
  end

  if assets_to_delete.size > 0
    puts "Found #{assets_to_delete.size} assets to delete..."

    assets_to_delete.each do |to_delete|
      if !to_delete.start_with?(prefix_s3_path("assets/"))
        # Sanity check, this should never happen
        raise "Attempted to delete a non-/asset S3 path (#{to_delete}). Aborting"
      end
    end

    assets_to_delete.each_slice(500) do |slice|
      message = "Deleting #{slice.size} assets...\n"
      message += slice.join("\n").indent(2)
      puts message
      helper.delete_objects(slice)
      puts "... done"
    end
  else
    puts "No stale assets found"
  end
end