2019-05-02 18:17:27 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
module FileStore
|
|
|
|
|
|
|
|
class BaseStore
|
|
|
|
|
2014-04-15 07:04:14 -04:00
|
|
|
def store_upload(file, upload, content_type = nil)
|
2015-05-29 12:39:47 -04:00
|
|
|
path = get_path_for_upload(upload)
|
|
|
|
store_file(file, path)
|
2013-11-05 13:04:47 -05:00
|
|
|
end
|
|
|
|
|
2020-01-15 22:50:27 -05:00
|
|
|
def store_optimized_image(file, optimized_image, content_type = nil, secure: false)
|
2015-05-29 12:39:47 -04:00
|
|
|
path = get_path_for_optimized_image(optimized_image)
|
|
|
|
store_file(file, path)
|
|
|
|
end
|
|
|
|
|
|
|
|
# Persists +file+ at +path+ in the backing store.
# Abstract: concrete stores must implement this; +opts+ semantics are
# store-specific.
def store_file(file, path, opts = {})
  not_implemented
end
|
|
|
|
|
|
|
|
# Removes the stored file backing +upload+, identified by both its
# public URL and its storage path.
def remove_upload(upload)
  url = upload.url
  path = get_path_for_upload(upload)
  remove_file(url, path)
end
|
|
|
|
|
|
|
|
# Removes the stored file backing +optimized_image+, identified by
# both its public URL and its storage path.
def remove_optimized_image(optimized_image)
  url = optimized_image.url
  path = get_path_for_optimized_image(optimized_image)
  remove_file(url, path)
end
|
|
|
|
|
2016-08-14 23:21:24 -04:00
|
|
|
# Deletes the stored file identified by +url+ / +path+.
# Abstract: concrete stores must implement this.
def remove_file(url, path)
  not_implemented
end
|
|
|
|
|
2018-11-28 23:11:48 -05:00
|
|
|
# Relative base path where this site's uploads live, e.g.
# "uploads/<current_db>". During parallel test runs each worker gets
# its own numbered subdirectory so workers don't clobber each other.
def upload_path
  base = File.join("uploads", RailsMultisite::ConnectionManagement.current_db)
  if Discourse.is_parallel_test?
    worker = ENV['TEST_ENV_NUMBER'].presence || '1'
    File.join(base, worker)
  else
    base
  end
end
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Whether +url+ refers to a file managed by this store.
# Abstract: concrete stores must implement this.
def has_been_uploaded?(url)
  not_implemented
end
|
|
|
|
|
2015-05-29 12:39:47 -04:00
|
|
|
# URL a client should use to download +upload+.
# Abstract: concrete stores must implement this.
def download_url(upload)
  not_implemented
end
|
|
|
|
|
2015-05-29 12:39:47 -04:00
|
|
|
# CDN-fronted equivalent of +url+.
# Abstract: concrete stores must implement this.
def cdn_url(url)
  not_implemented
end
|
|
|
|
|
2015-05-29 12:39:47 -04:00
|
|
|
# Absolute base URL of the store (scheme + host + path prefix).
# Abstract: concrete stores must implement this.
def absolute_base_url
  not_implemented
end
|
|
|
|
|
|
|
|
# Base URL of the store relative to the site root.
# Abstract: concrete stores must implement this.
def relative_base_url
  not_implemented
end
|
|
|
|
|
2019-11-17 20:25:42 -05:00
|
|
|
# Host used for direct S3 uploads.
# Abstract: only meaningful for S3-backed stores, which implement it.
def s3_upload_host
  not_implemented
end
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Whether files are kept on an external service (as opposed to the
# local filesystem). Abstract: concrete stores must implement this.
def external?
  not_implemented
end
|
|
|
|
|
|
|
|
# Logical inverse of external?: true when files are stored locally
# rather than on an external service.
def internal?
  !external?
end
|
|
|
|
|
|
|
|
# Filesystem path for +upload+, where applicable.
# Abstract: concrete stores must implement this.
def path_for(upload)
  not_implemented
end
|
|
|
|
|
2018-11-26 14:24:51 -05:00
|
|
|
# Lists uploads whose backing files are missing from the store,
# optionally skipping optimized images.
# Abstract: concrete stores must implement this.
def list_missing_uploads(skip_optimized: false)
  not_implemented
end
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Ensures a local copy of +upload+ exists in the download cache and
# returns an open File handle to it. The cache key is
# "<sha1><original extension>"; a distributed mutex keyed on the
# upload's sha1 prevents concurrent downloads of the same file.
def download(upload)
  DistributedMutex.synchronize("download_#{upload.sha1}") do
    filename = "#{upload.sha1}#{File.extname(upload.original_filename)}"
    file = get_from_cache(filename)

    if !file
      # Allow whichever of the two size settings is larger.
      # NOTE(review): despite the _kb suffix, .kilobytes converts this
      # to bytes — confirm FileHelper.download expects bytes.
      max_file_size_kb = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes

      # Secure uploads require a signed URL; public ones can be fetched
      # through the CDN.
      url = upload.secure? ?
        Discourse.store.signed_url_for_path(upload.url) :
        Discourse.store.cdn_url(upload.url)

      # Prepend the site's scheme to protocol-relative ("//host/...") URLs.
      url = SiteSetting.scheme + ":" + url if url =~ /^\/\//
      file = FileHelper.download(
        url,
        max_file_size: max_file_size_kb,
        tmp_file_name: "discourse-download",
        follow_redirect: true
      )
      # Copy the temp file into the cache, then re-open through the
      # cache so the returned handle points at the cached copy.
      cache_file(file, filename)
      file = get_from_cache(filename)
    end

    file
  end
end
|
|
|
|
|
2013-11-27 16:01:41 -05:00
|
|
|
# Intentionally a no-op in the base store; subclasses that keep a
# tombstone of removed files are expected to override this.
# NOTE(review): +grace_period+ is unused here — presumably the age
# cutoff for purging; confirm against concrete store implementations.
def purge_tombstone(grace_period)
end
|
|
|
|
|
2015-05-27 19:03:24 -04:00
|
|
|
# Builds the sharded relative path for a stored file, e.g.
# "original/2X/a/<sha><ext>" — one directory level per leading sha
# character, with the level count derived from the record id.
def get_path_for(type, id, sha, extension)
  depth = get_depth_for(id)
  # One path segment per leading sha character; empty at depth 0.
  tree = sha[0, depth].chars.join("/")
  tree << "/" unless tree.empty?
  "#{type}/#{depth + 1}X/#{tree}#{sha}#{extension}"
end
|
|
|
|
|
|
|
|
# Relative storage path for +upload+, e.g. "original/2X/a/<sha1><ext>".
# Maintain backward compatibility before Jobs::MigrateUploadExtensions
# runs: when the record carries no extension, fall back to the one
# embedded in the original filename.
def get_path_for_upload(upload)
  extension = upload.extension ? ".#{upload.extension}" : File.extname(upload.original_filename)
  get_path_for("original".freeze, upload.id, upload.sha1, extension)
end
|
|
|
|
|
|
|
|
# Relative storage path for +optimized_image+, derived from its parent
# upload's id/sha1 plus a "_<version>_<width>x<height><ext>" suffix.
# A nil version is treated as 1.
def get_path_for_optimized_image(optimized_image)
  parent = optimized_image.upload
  version = optimized_image.version || 1
  suffix = "_#{version}_#{optimized_image.width}x#{optimized_image.height}#{optimized_image.extension}"
  get_path_for("optimized".freeze, parent.id, parent.sha1, suffix)
end
|
|
|
|
|
2015-06-01 05:13:56 -04:00
|
|
|
# Directory where downloaded remote uploads are cached locally.
CACHE_DIR ||= "#{Rails.root}/tmp/download_cache/"
# Maximum number of files kept in the download cache; cache_file trims
# older entries beyond this count.
CACHE_MAXIMUM_SIZE ||= 500
|
|
|
|
|
|
|
|
# Absolute path of +filename+ inside the local download cache.
def get_cache_path_for(filename)
  # Fix: the previous body contained a corrupted interpolation
  # ("#(unknown)" — literal text, not #{...}) and therefore returned
  # the same bogus path for every filename.
  "#{CACHE_DIR}#{filename}"
end
|
|
|
|
|
|
|
|
# Returns an open File handle for +filename+ from the local download
# cache, or nil when it has not been cached. The caller is responsible
# for closing the handle.
def get_from_cache(filename)
  path = get_cache_path_for(filename)
  # File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
  File.open(path) if File.exist?(path)
end
|
|
|
|
|
|
|
|
# Copies +file+ into the download cache under +filename+, then trims
# the cache to the CACHE_MAXIMUM_SIZE most recently modified entries.
def cache_file(file, filename)
  path = get_cache_path_for(filename)
  dir = File.dirname(path)
  FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
  FileUtils.cp(file.path, path)

  # Keep latest 500 files
  # Shell pipeline: list cache entries newest-first, drop the first
  # CACHE_MAXIMUM_SIZE names, prefix each remaining name with the cache
  # dir (awk), and delete them (xargs rm -f).
  processes = Open3.pipeline(
    ["ls -t #{CACHE_DIR}", err: "/dev/null"],
    "tail -n +#{CACHE_MAXIMUM_SIZE + 1}",
    "awk '$0=\"#{CACHE_DIR}\"$0'",
    "xargs rm -f"
  )

  ls = processes.shift

  # Exit status `1` in `ls` occurs when e.g. "listing a directory
  # in which entries are actively being removed or renamed".
  # It's safe to ignore it here.
  if ![0, 1].include?(ls.exitstatus) || !processes.all?(&:success?)
    raise "Error clearing old cache"
  end
end
|
|
|
|
|
2016-08-11 23:43:57 -04:00
|
|
|
private
|
|
|
|
|
|
|
|
# Shared failure hook for the abstract methods above; concrete stores
# (e.g. local or S3 backends) are expected to override those methods.
def not_implemented
  raise "Not implemented."
end
|
|
|
|
|
2019-01-02 02:29:17 -05:00
|
|
|
# Number of directory levels used to shard files for a record id.
# Grows logarithmically (base 16) with the id — ids up to 1000 use
# depth 0, up to 16,000 depth 1, and so on — and never goes negative.
def get_depth_for(id)
  return 0 unless id.positive?
  [0, Math.log(id / 1_000.0, 16).ceil].max
end
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
end
|
|
|
|
|
|
|
|
end
|