PERF: Improve cook_url performance for topic thumbnails (#11609)

- Only initialize the S3Helper when needed
- Skip initializing the S3Helper for S3Store#cdn_url
- Allow cook_url to be passed a `local` hint to skip unnecessary checks
This commit is contained in:
David Taylor 2020-12-30 18:13:13 +00:00 committed by GitHub
parent 1cd6ff15dc
commit 13e39d8b9f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 25 additions and 22 deletions

View File

@@ -88,7 +88,7 @@ class Topic < ActiveRecord::Base
Jobs.enqueue(:generate_topic_thumbnails, { topic_id: id, extra_sizes: extra_sizes })
end
infos.each { |i| i[:url] = UrlHelper.cook_url(i[:url], secure: original.secure?) }
infos.each { |i| i[:url] = UrlHelper.cook_url(i[:url], secure: original.secure?, local: true) }
infos.sort_by! { |i| -i[:width] * i[:height] }
end
@@ -123,7 +123,7 @@ class Topic < ActiveRecord::Base
end
raw_url = thumbnail&.optimized_image&.url || image_upload&.url
UrlHelper.cook_url(raw_url, secure: image_upload&.secure?)
UrlHelper.cook_url(raw_url, secure: image_upload&.secure?, local: true) if raw_url
end
def featured_users

View File

@@ -11,10 +11,12 @@ module FileStore
class S3Store < BaseStore
TOMBSTONE_PREFIX ||= "tombstone/"
attr_reader :s3_helper
# Accepts an optional pre-built S3Helper. When none is given, the helper
# is built lazily by #s3_helper on first use, so constructing the store
# does not pay the helper's setup cost (the point of this change).
def initialize(s3_helper = nil)
  @s3_helper = s3_helper
end

# Lazily memoized S3Helper for this store's bucket. The tombstone prefix
# is namespaced per site when running multisite.
def s3_helper
  @s3_helper ||= S3Helper.new(s3_bucket,
    Rails.configuration.multisite ? multisite_tombstone_prefix : TOMBSTONE_PREFIX
  )
end
@@ -68,7 +70,7 @@ module FileStore
path.prepend(File.join(upload_path, "/")) if Rails.configuration.multisite
# if this fails, it will throw an exception
path, etag = @s3_helper.upload(file, path, options)
path, etag = s3_helper.upload(file, path, options)
# return the upload url and etag
[File.join(absolute_base_url, path), etag]
@@ -77,12 +79,12 @@ module FileStore
# Removes the object at `path`, but only for URLs this store actually
# hosts. The `true` flag asks S3Helper to copy the file to the tombstone
# prefix first so it can be recovered until the tombstone is purged.
def remove_file(url, path)
  return unless has_been_uploaded?(url)
  # copy the removed file to tombstone
  s3_helper.remove(path, true)
end
# Copies an object from `source` to `destination` within the bucket;
# no-op when `url` is not hosted by this store.
def copy_file(url, source, destination)
  return unless has_been_uploaded?(url)
  s3_helper.copy(source, destination)
end
def has_been_uploaded?(url)
@@ -119,11 +121,11 @@ module FileStore
end
# Folder portion of the configured "bucket/folder" setting (may be nil —
# callers such as cdn_url check for that). Computed straight from the
# setting instead of via s3_helper so this does not force the helper to
# initialize.
def s3_bucket_folder_path
  S3Helper.get_bucket_and_folder_path(s3_bucket)[1]
end

# Bucket-name portion of the configured bucket setting.
def s3_bucket_name
  S3Helper.get_bucket_and_folder_path(s3_bucket)[0]
end
def absolute_base_url
@@ -139,7 +141,7 @@ module FileStore
end
# Updates the bucket lifecycle rule controlling how long tombstoned
# (soft-deleted) files are retained. `grace_period` is forwarded verbatim
# to S3Helper — presumably a number of days; confirm against S3Helper.
def purge_tombstone(grace_period)
  s3_helper.update_tombstone_lifecycle(grace_period)
end
def multisite_tombstone_prefix
@@ -165,7 +167,7 @@ module FileStore
# Rewrites a direct S3 URL to the configured CDN URL, preserving the
# scheme (or protocol-relative form). Uses s3_bucket_folder_path — derived
# from settings — rather than s3_helper, so this hot path never triggers
# S3Helper initialization.
def cdn_url(url)
  return url if SiteSetting.Upload.s3_cdn_url.blank?
  schema = url[/^(https?:)?\/\//, 1]
  folder = s3_bucket_folder_path.nil? ? "" : "#{s3_bucket_folder_path}/"
  url.sub(File.join("#{schema}#{absolute_base_url}", folder), File.join(SiteSetting.Upload.s3_cdn_url, "/"))
end
@@ -177,7 +179,7 @@ module FileStore
# Copies an uploaded avatar object to its per-user template path. Both
# keys are made bucket-relative by stripping the absolute base URL.
def cache_avatar(avatar, user_id)
  source = avatar.url.sub(absolute_base_url + "/", "")
  destination = avatar_template(avatar, user_id).sub(absolute_base_url + "/", "")
  s3_helper.copy(source, destination)
end
def avatar_template(avatar, user_id)
@@ -213,7 +215,7 @@ module FileStore
end
# Downloads the S3 object backing `upload` to `destination_path` on disk.
def download_file(upload, destination_path)
  s3_helper.download_file(get_upload_key(upload), destination_path)
end
def copy_from(source_path)
@@ -258,7 +260,7 @@ module FileStore
)
end
obj = @s3_helper.object(url)
obj = s3_helper.object(url)
obj.presigned_url(:get, opts)
end
@@ -272,7 +274,7 @@ module FileStore
# Sets the object's canned ACL: "private" when secure, "public-read"
# otherwise. A missing key is logged and swallowed — deliberate
# best-effort, since the upload may have been removed out-of-band.
def update_ACL(key, secure)
  begin
    s3_helper.object(key).acl.put(acl: secure ? "private" : "public-read")
  rescue Aws::S3::Errors::NoSuchKey
    Rails.logger.warn("Could not update ACL on upload with key: '#{key}'. Upload is missing.")
  end
end
@@ -282,7 +284,7 @@ module FileStore
connection = ActiveRecord::Base.connection.raw_connection
connection.exec('CREATE TEMP TABLE verified_ids(val integer PRIMARY KEY)')
marker = nil
files = @s3_helper.list(prefix, marker)
files = s3_helper.list(prefix, marker)
while files.count > 0 do
verified_ids = []
@@ -295,7 +297,7 @@ module FileStore
verified_id_clause = verified_ids.map { |id| "('#{PG::Connection.escape_string(id.to_s)}')" }.join(",")
connection.exec("INSERT INTO verified_ids VALUES #{verified_id_clause}")
files = @s3_helper.list(prefix, marker)
files = s3_helper.list(prefix, marker)
end
missing_uploads = model.joins('LEFT JOIN verified_ids ON verified_ids.val = id').where("verified_ids.val IS NULL")

View File

@@ -76,8 +76,9 @@ class UrlHelper
url[/x-amz-(algorithm|credential)/i].present?
end
def self.cook_url(url, secure: false)
return url unless is_local(url)
def self.cook_url(url, secure: false, local: nil)
local = is_local(url) if local.nil?
return url if !local
url = secure ? secure_proxy_without_cdn(url) : absolute_without_cdn(url)

View File

@@ -6,7 +6,7 @@ require 'file_store/local_store'
describe FileStore::S3Store do
let(:store) { FileStore::S3Store.new }
let(:s3_helper) { store.instance_variable_get(:@s3_helper) }
let(:s3_helper) { store.s3_helper }
let(:client) { Aws::S3::Client.new(stub_responses: true) }
let(:resource) { Aws::S3::Resource.new(client: client) }
let(:s3_bucket) { resource.bucket("s3-upload-bucket") }

View File

@@ -169,7 +169,7 @@ RSpec.describe 'Multisite s3 uploads', type: :multisite do
let(:client) { Aws::S3::Client.new(stub_responses: true) }
let(:resource) { Aws::S3::Resource.new(client: client) }
let(:s3_bucket) { resource.bucket("some-really-cool-bucket") }
let(:s3_helper) { store.instance_variable_get(:@s3_helper) }
let(:s3_helper) { store.s3_helper }
let(:s3_object) { stub }
before(:each) do