# frozen_string_literal: true
module FileStore
class BaseStore
# Persists +file+ as the backing file for +upload+ at the path derived
# from the upload record.
#
# +content_type+ is unused here; kept for interface compatibility with
# subclasses (presumably the S3 store — confirm against implementations).
def store_upload(file, upload, content_type = nil)
  path = get_path_for_upload(upload)
  store_file(file, path)
end
# Persists +file+ as the backing file for +optimized_image+.
#
# +content_type+ and +secure:+ are unused here; kept for interface
# compatibility with store subclasses.
def store_optimized_image(file, optimized_image, content_type = nil, secure: false)
  path = get_path_for_optimized_image(optimized_image)
  store_file(file, path)
end
# Writes +file+ to the store at +path+. Abstract — subclasses must implement.
# +opts+ carries store-specific options (unused in the base class).
def store_file(file, path, opts = {})
  not_implemented
end
# Deletes the stored file backing +upload+ (both its public URL and its
# store path are passed to the store-specific #remove_file).
def remove_upload(upload)
  remove_file(upload.url, get_path_for_upload(upload))
end
# Deletes the stored file backing +optimized_image+.
def remove_optimized_image(optimized_image)
  remove_file(optimized_image.url, get_path_for_optimized_image(optimized_image))
end
# Deletes the file identified by +url+ / +path+ from the store.
# Abstract — subclasses must implement.
def remove_file(url, path)
  not_implemented
end
# Relative base path for uploads of the current multisite database.
# During parallel test runs each worker gets its own numbered subdirectory
# so test processes don't clobber each other's files.
def upload_path
  base = File.join("uploads", RailsMultisite::ConnectionManagement.current_db)
  if Discourse.is_parallel_test?
    worker = ENV['TEST_ENV_NUMBER'].presence || '1'
    File.join(base, worker)
  else
    base
  end
end
# Whether +url+ refers to a file managed by this store.
# Abstract — subclasses must implement.
def has_been_uploaded?(url)
  not_implemented
end
# URL from which +upload+ can be downloaded. Abstract — subclasses implement.
def download_url(upload)
  not_implemented
end
# CDN-mapped equivalent of +url+. Abstract — subclasses must implement.
def cdn_url(url)
  not_implemented
end
# Absolute base URL of the store. Abstract — subclasses must implement.
def absolute_base_url
  not_implemented
end
# Base URL path relative to the site root. Abstract — subclasses implement.
def relative_base_url
  not_implemented
end
# Host used for uploads; presumably only meaningful for the S3 store —
# abstract here, subclasses implement.
def s3_upload_host
  not_implemented
end
# Whether files live outside the local filesystem (e.g. an S3 store).
# Abstract — subclasses must implement; #internal? is derived from this.
def external?
  not_implemented
end
# A store is internal exactly when it is not external.
def internal?
  !external?
end
# Local filesystem path for +upload+. Abstract — subclasses must implement.
def path_for(upload)
  not_implemented
end
# Presumably enumerates uploads whose backing files are missing from the
# store (skipping optimized images when +skip_optimized+ is true) — confirm
# against concrete implementations. Abstract here.
def list_missing_uploads(skip_optimized: false)
  not_implemented
end
# Fetches the file for +upload+ into the local download cache and returns an
# open File handle, reusing the cached copy when present. The mutex keyed on
# the upload's sha1 prevents concurrent downloads of the same file.
def download(upload)
  DistributedMutex.synchronize("download_#{upload.sha1}") do
    filename = "#{upload.sha1}#{File.extname(upload.original_filename)}"
    file = get_from_cache(filename)

    if !file
      # Allow the larger of the image/attachment limits, in bytes.
      max_file_size_kb = [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
      # Secure uploads need a signed URL; public ones go through the CDN.
      url = upload.secure? ?
        Discourse.store.signed_url_for_path(upload.url) :
        Discourse.store.cdn_url(upload.url)
      # Protocol-relative URLs ("//host/...") get the site scheme prepended.
      url = SiteSetting.scheme + ":" + url if url =~ /^\/\//
      file = FileHelper.download(
        url,
        max_file_size: max_file_size_kb,
        tmp_file_name: "discourse-download",
        follow_redirect: true
      )
      cache_file(file, filename)
      # Re-open from the cache so the returned handle points at the cached copy.
      file = get_from_cache(filename)
    end

    file
  end
end
# Hook for deleting tombstoned files older than +grace_period+; a no-op in
# the base store — subclasses may override.
def purge_tombstone(grace_period)
end
# Builds the sharded relative path "<type>/<depth+1>X/<shard dirs><sha><ext>".
# The first `depth` characters of +sha+ become nested directories, which
# keeps any single directory from accumulating too many files.
def get_path_for(type, id, sha, extension)
  depth = get_depth_for(id)
  tree = File.join(*sha[0, depth].chars, "")
  "#{type}/#{depth + 1}X/#{tree}#{sha}#{extension}"
end
# Store path for +upload+ under "original/", using the record's extension
# column when populated and otherwise falling back to the extension of the
# original filename.
def get_path_for_upload(upload)
  extension =
    if upload.extension
      ".#{upload.extension}"
    else
      # Maintain backward compatibility before Jobs::MigrateUploadExtensions runs
      File.extname(upload.original_filename)
    end
  get_path_for("original".freeze, upload.id, upload.sha1, extension)
end
# Store path for +optimized_image+ under "optimized/", keyed by its parent
# upload's id/sha1 with a "_<version>_<width>x<height>" suffix.
def get_path_for_optimized_image(optimized_image)
  parent = optimized_image.upload
  v = optimized_image.version || 1
  suffix = "_#{v}_#{optimized_image.width}x#{optimized_image.height}#{optimized_image.extension}"
  get_path_for("optimized".freeze, parent.id, parent.sha1, suffix)
end
# Local on-disk cache directory used by #download / #cache_file.
CACHE_DIR ||= "#{Rails.root}/tmp/download_cache/"
# Maximum number of cached files retained; older entries are evicted in #cache_file.
CACHE_MAXIMUM_SIZE ||= 500
# Absolute path inside the local download cache for +filename+.
# Fix: the interpolation was garbled to "#(unknown)", which ignored the
# +filename+ argument entirely — it must be "#{filename}".
def get_cache_path_for(filename)
  "#{CACHE_DIR}#{filename}"
end
# Returns an open File for the cached +filename+, or nil when not cached.
# Fix: `File.exists?` is deprecated and was removed in Ruby 3.2 — use
# `File.exist?` instead.
def get_from_cache(filename)
  path = get_cache_path_for(filename)
  File.open(path) if File.exist?(path)
end
# Copies +file+ into the local download cache as +filename+, then evicts the
# oldest entries so at most CACHE_MAXIMUM_SIZE files remain.
def cache_file(file, filename)
  path = get_cache_path_for(filename)
  dir = File.dirname(path)
  FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
  FileUtils.cp(file.path, path)
  # Keep latest 500 files
  # Pipeline: `ls -t` lists newest-first, `tail -n +N` keeps everything past
  # the newest CACHE_MAXIMUM_SIZE names, awk prepends the cache dir to each
  # name, and xargs removes them.
  processes = Open3.pipeline(
    ["ls -t #{CACHE_DIR}", err: "/dev/null"],
    "tail -n +#{CACHE_MAXIMUM_SIZE + 1}",
    "awk '$0=\"#{CACHE_DIR}\"$0'",
    "xargs rm -f"
  )
  ls = processes.shift
  # Exit status `1` in `ls` occurs when e.g. "listing a directory
  # in which entries are actively being removed or renamed".
  # It's safe to ignore it here.
  if ![0, 1].include?(ls.exitstatus) || !processes.all?(&:success?)
    raise "Error clearing old cache"
  end
end
private
# Shared failure for the abstract methods above; concrete stores override them.
def not_implemented
  raise "Not implemented."
end
# Number of directory levels used to shard storage paths for record +id+:
# 0 for ids up to 1000, then growing logarithmically (base 16); never negative.
def get_depth_for(id)
  return 0 unless id.positive?
  [0, Math.log(id / 1_000.0, 16).ceil].max
end
end
end