2015-05-25 11:59:00 -04:00
|
|
|
require "file_store/base_store"
|
2014-09-24 16:52:09 -04:00
|
|
|
require_dependency "s3_helper"
|
2014-04-15 07:04:14 -04:00
|
|
|
require_dependency "file_helper"
|
2015-05-25 23:08:31 -04:00
|
|
|
require_dependency "file_store/local_store"
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
module FileStore
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
class S3Store < BaseStore
|
2014-09-24 16:52:09 -04:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
# Key prefix under which removed objects are parked before permanent
# deletion (see remove_file / purge_tombstone). `||=` avoids
# "already initialized constant" warnings when the file is reloaded.
TOMBSTONE_PREFIX ||= "tombstone/"
|
|
|
|
|
|
|
|
# Builds the store. An +s3_helper+ can be injected (useful for tests);
# otherwise one is created for the configured bucket with the standard
# tombstone prefix.
def initialize(s3_helper=nil)
  @s3_helper = s3_helper
  @s3_helper ||= S3Helper.new(s3_bucket, TOMBSTONE_PREFIX)
end
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
# Stores an upload's file on S3 and returns its public URL.
# The original filename is preserved (for the attachment download header)
# and a local copy is cached for later retrieval.
def store_upload(file, upload, content_type=nil)
  store_file(
    file,
    get_path_for_upload(file, upload),
    filename: upload.original_filename,
    content_type: content_type,
    cache_locally: true
  )
end
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Stores an optimized (resized) image on S3 and returns its public URL.
# Unlike store_upload, no local caching and no filename metadata is kept.
def store_optimized_image(file, optimized_image)
  store_file(file, get_path_for_optimized_image(file, optimized_image))
end
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Moves the upload's S3 object to the tombstone prefix
# (soft delete — see remove_file).
def remove_upload(upload)
  url = upload.url
  remove_file(url)
end
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Moves the optimized image's S3 object to the tombstone prefix
# (soft delete — see remove_file).
def remove_optimized_image(optimized_image)
  url = optimized_image.url
  remove_file(url)
end
|
2013-08-13 16:08:29 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Whether +url+ points at an object owned by this store: either the
# bucket's own base URL or, when configured, the S3 CDN URL.
def has_been_uploaded?(url)
  return false if url.blank?

  cdn = SiteSetting.s3_cdn_url
  url.start_with?(absolute_base_url) || (cdn.present? && url.start_with?(cdn))
end
|
2013-08-13 16:08:29 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Protocol-relative base URL of the bucket (works over both http and
# https). Memoized, since bucket and region don't change at runtime.
def absolute_base_url
  return @absolute_base_url if @absolute_base_url

  @absolute_base_url = "//#{s3_bucket}.s3-#{s3_region}.amazonaws.com"
end
|
2013-08-13 16:08:29 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# S3 is an external store — files live outside the app server.
def external?
  true
end
|
2013-08-13 16:08:29 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Complement of external? — always false for this store.
def internal?
  !external?
end
|
2013-08-13 16:08:29 -04:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Downloads the S3 copy of +upload+ and returns it as an open File,
# going through (and populating) the on-disk cache. Returns nil when
# the URL does not belong to this store.
def download(upload)
  return unless has_been_uploaded?(upload.url)

  # Serialize concurrent downloads of the same content across processes
  # so only one of them hits S3 and writes the cache entry.
  DistributedMutex.synchronize("s3_download_#{upload.sha1}") do
    filename = "#{upload.sha1}#{File.extname(upload.original_filename)}"
    file = get_from_cache(filename)

    if !file
      # NOTE(review): cap is the larger of the two size limits,
      # presumably because the upload may be either an image or an
      # attachment — confirm against the upload validators.
      max_file_size_kb = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
      # upload.url is protocol-relative ("//..."), so prepend the scheme
      url = SiteSetting.scheme + ":" + upload.url
      file = FileHelper.download(url, max_file_size_kb, "discourse-s3", true)
      cache_file(file, filename)
    end

    file
  end
end
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2013-11-27 16:01:41 -05:00
|
|
|
# Delegates tombstone expiry to S3's bucket lifecycle rules, so objects
# under TOMBSTONE_PREFIX are purged after +grace_period+
# (presumably days — confirm against S3Helper#update_tombstone_lifecycle).
def purge_tombstone(grace_period)
  @s3_helper.update_tombstone_lifecycle(grace_period)
end
|
|
|
|
|
2015-05-25 23:08:31 -04:00
|
|
|
# Returns the local filesystem path for uploads that still live on local
# storage (URL like "/uploads/..."), and nil for external URLs
# (protocol-relative "//..." or blank).
def path_for(upload)
  url = upload.url
  return unless url && url[0] == "/" && url[1] != "/"

  FileStore::LocalStore.new.path_for(upload)
end
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
private
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2014-09-24 16:52:09 -04:00
|
|
|
# S3 key for an original upload, sharded by the first two sha1 chars.
# NOTE(review): +file+ is unused here — presumably kept for interface
# parity with other stores; confirm before removing.
def get_path_for_upload(file, upload)
  get_path_for("original".freeze, upload.sha1, upload.extension)
end
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2014-09-24 16:52:09 -04:00
|
|
|
# S3 key for an optimized image; the dimensions are encoded into the
# filename suffix (e.g. "_100x200.png") ahead of the extension.
# NOTE(review): +file+ is unused — kept for interface parity.
def get_path_for_optimized_image(file, optimized_image)
  dimensions = "#{optimized_image.width}x#{optimized_image.height}"
  get_path_for("optimized".freeze, optimized_image.sha1, "_#{dimensions}#{optimized_image.extension}")
end
|
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
# Builds a sharded S3 key: "<type>/<sha[0]>/<sha[1]>/<sha><extension>".
# Sharding by the first two sha chars keeps any single prefix small.
def get_path_for(type, sha, extension)
  [type, sha[0], sha[1], "#{sha}#{extension}"].join("/")
end
|
2013-11-05 13:04:47 -05:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
# Uploads +file+ to S3 under +path+ and returns its public URL.
#
# options
#  - filename:      original client filename; triggers a
#                   Content-Disposition header for non-image attachments
#  - content_type:  explicit MIME type to store with the object
#  - cache_locally: when true, also keep a local copy (see cache_file)
def store_file(file, path, opts={})
  filename = opts[:filename].presence
  content_type = opts[:content_type].presence
  # cache file locally when needed
  cache_file(file, File.basename(path)) if opts[:cache_locally]
  # stored uploads are public by default
  options = { acl: "public-read" }
  # add a "content disposition" header so browsers download "attachments"
  # instead of rendering them inline.
  # NOTE(review): filename is interpolated unescaped — a quote in the
  # original filename would break the header; consider escaping per RFC 6266.
  options[:content_disposition] = "attachment; filename=\"#{filename}\"" if filename && !FileHelper.is_image?(filename)
  # add a "content type" header when provided
  options[:content_type] = content_type if content_type
  # if this fails, it will throw an exception
  @s3_helper.upload(file, path, options)
  # return the upload url
  "#{absolute_base_url}/#{path}"
end
|
2013-11-27 16:01:41 -05:00
|
|
|
|
2014-09-24 16:52:09 -04:00
|
|
|
# Soft-deletes the object identified by +url+: the file is copied to the
# tombstone prefix before removal, so it can still be purged later.
# No-op for URLs that don't belong to this store.
def remove_file(url)
  return unless has_been_uploaded?(url)

  # second argument => copy the removed file to tombstone
  @s3_helper.remove(File.basename(url), true)
end
|
2013-07-31 17:26:34 -04:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
# Directory for the local on-disk cache of S3 objects (trailing slash is
# relied upon by get_cache_path_for).
CACHE_DIR ||= "#{Rails.root}/tmp/s3_cache/"
# Maximum number of files kept in the local cache (enforced by cache_file).
CACHE_MAXIMUM_SIZE ||= 500
|
|
|
|
|
|
|
|
# Maps a cache entry name to its absolute path inside CACHE_DIR.
# (Restores the filename interpolation that was garbled in this copy.)
def get_cache_path_for(filename)
  "#{CACHE_DIR}#{filename}"
end
|
|
|
|
|
|
|
|
# Returns an open File for a cached entry, or nil on cache miss.
# NOTE(review): the handle is returned open — callers own closing it.
def get_from_cache(filename)
  path = get_cache_path_for(filename)
  # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
  File.open(path) if File.exist?(path)
end
|
|
|
|
|
|
|
|
# Copies +file+ into the local cache under +filename+, then trims the
# cache to at most CACHE_MAXIMUM_SIZE entries (oldest evicted first).
def cache_file(file, filename)
  path = get_cache_path_for(filename)
  # mkdir_p is idempotent — no need to probe the directory first
  FileUtils.mkdir_p(File.dirname(path))
  FileUtils.cp(file.path, path)
  # keep up to CACHE_MAXIMUM_SIZE files: "ls -tr" lists oldest first and
  # "head -n -K" (GNU) drops the last K lines, so everything older than
  # the newest K files gets removed. The previous "head -n +K" selected
  # the K *oldest* files for deletion, which wiped small caches
  # (including the entry just written) instead of trimming the excess.
  `ls -tr #{CACHE_DIR} | head -n -#{CACHE_MAXIMUM_SIZE} | xargs rm -f`
end
|
|
|
|
|
2014-09-24 16:52:09 -04:00
|
|
|
# Memoized, lower-cased bucket name from site settings.
# Raises Discourse::SiteSettingMissing when s3_upload_bucket is not set.
def s3_bucket
  @s3_bucket ||= begin
    raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.s3_upload_bucket.blank?
    SiteSetting.s3_upload_bucket.downcase
  end
end
|
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
# Configured AWS region for the bucket (used in absolute_base_url).
def s3_region
  SiteSetting.s3_region
end
|
2013-12-16 05:44:59 -05:00
|
|
|
|
2013-07-31 17:26:34 -04:00
|
|
|
end
|
|
|
|
|
|
|
|
end
|