2013-06-15 05:29:20 -04:00
|
|
|
require "digest/sha1"
|
|
|
|
|
2016-04-11 14:42:40 -04:00
|
|
|
################################################################################
|
|
|
|
# gather #
|
|
|
|
################################################################################
|
|
|
|
|
|
|
|
# Gather uploads belonging to other sites' /uploads/<db>/ prefixes into the
# current site (every site unless RAILS_DB restricts it to one).
task "uploads:gather" => :environment do
  require "db_helper"

  if ENV["RAILS_DB"]
    gather_uploads
  else
    gather_uploads_for_all_sites
  end
end
|
|
|
|
|
|
|
|
# Run the gather step once per multisite DB connection.
def gather_uploads_for_all_sites
  RailsMultisite::ConnectionManagement.each_connection do
    gather_uploads
  end
end
|
|
|
|
|
2016-04-13 10:33:00 -04:00
|
|
|
# Returns true when +path+ points to an existing, non-empty file.
# Any error (permission issue, race with deletion, nil path) is treated
# as "does not exist" rather than raised.
def file_exists?(path)
  # File.exist? (not the deprecated File.exists?, removed in Ruby 3.2)
  File.exist?(path) && File.size(path) > 0
rescue
  false
end
|
|
|
|
|
2016-04-12 10:00:25 -04:00
|
|
|
# Moves upload files that live under another database's /uploads/<old_db>/
# prefix into this database's /uploads/<current_db>/ directory, then remaps
# every reference in the DB from the old URL to the new one.
# Prints "." per upload moved, "!" per failure. No return value of interest.
def gather_uploads
  public_directory = "#{Rails.root}/public"
  current_db = RailsMultisite::ConnectionManagement.current_db

  puts "", "Gathering uploads for '#{current_db}'...", ""

  # Uploads whose URL is local (/uploads/...) but filed under a different db.
  Upload.where("url ~ '^\/uploads\/'")
    .where("url !~ '^\/uploads\/#{current_db}'")
    .find_each do |upload|
    begin
      # first path segment after /uploads/ is the owning database name
      old_db = upload.url[/^\/uploads\/([^\/]+)\//, 1]
      from = upload.url.dup
      to = upload.url.sub("/uploads/#{old_db}/", "/uploads/#{current_db}/")
      source = "#{public_directory}#{from}"
      destination = "#{public_directory}#{to}"

      # create destination directory & copy file unless it already exists
      # (cp --link hard-links instead of duplicating the bytes — GNU cp only)
      unless file_exists?(destination)
        `mkdir -p '#{File.dirname(destination)}'`
        `cp --link '#{source}' '#{destination}'`
      end

      # ensure file has been successfully copied over before touching the DB
      raise unless file_exists?(destination)

      # remap links in db
      DbHelper.remap(from, to)
    rescue
      putc "!"
    else
      putc "."
    end
  end

  puts "", "Done!"
end
|
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
################################################################################
|
|
|
|
# backfill_shas #
|
|
|
|
################################################################################
|
|
|
|
|
2013-06-15 05:29:20 -04:00
|
|
|
# Computes and stores the SHA1 digest for every Upload record that does not
# have one yet, across all sites. Uploads whose file cannot be read are
# skipped with a message rather than aborting the run.
task "uploads:backfill_shas" => :environment do
  RailsMultisite::ConnectionManagement.each_connection do |db|
    puts "Backfilling #{db}..."
    Upload.where(sha1: nil).find_each do |u|
      begin
        path = Discourse.store.path_for(u)
        u.sha1 = Upload.generate_digest(path)
        u.save!
        putc "."
      rescue => e
        # best-effort: report and continue with the next upload
        puts "Skipping #{u.original_filename} (#{u.url}) #{e.message}"
      end
    end
  end
  puts "", "Done"
end
|
2014-06-24 09:35:15 -04:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
################################################################################
|
|
|
|
# migrate_from_s3 #
|
|
|
|
################################################################################
|
|
|
|
|
2014-06-24 09:35:15 -04:00
|
|
|
# Pull uploads referenced from S3 back into local storage
# (one site when RAILS_DB is set, otherwise every site).
task "uploads:migrate_from_s3" => :environment do
  require "db_helper"

  if ENV["RAILS_DB"]
    migrate_from_s3
  else
    migrate_all_from_s3
  end
end
|
2014-06-24 09:35:15 -04:00
|
|
|
|
2016-07-25 06:12:10 -04:00
|
|
|
# Best-effort guess of the original filename for a protocol-less upload URL.
# Tries, in order:
#   1. the Content-Disposition header obtained by fetching the URL,
#   2. the link text of a matching <a class="attachment"> in the post +raw+,
#   3. the last path segment of the URL.
# Returns nil if anything blows up while fetching.
# NOTE(review): `uri.open(..., allow_redirections: :all)` relies on open-uri
# plus the open_uri_redirections extension being loaded elsewhere — confirm.
def guess_filename(url, raw)
  begin
    uri = URI.parse("http:#{url}")
    f = uri.open("rb", read_timeout: 5, redirect: true, allow_redirections: :all)
    filename = if f.meta && f.meta["content-disposition"]
      f.meta["content-disposition"][/filename="([^"]+)"/, 1].presence
    end
    filename ||= raw[/<a class="attachment" href="(?:https?:)?#{Regexp.escape(url)}">([^<]+)<\/a>/, 1].presence
    filename ||= File.basename(url)
    filename
  rescue
    nil
  ensure
    # open-uri hands back a Tempfile-like object; close! deletes it.
    # `f` may be unassigned if the open itself failed, hence the rescue.
    f.try(:close!) rescue nil
  end
end
|
2015-03-18 13:23:55 -04:00
|
|
|
|
2016-07-25 06:12:10 -04:00
|
|
|
# Run the S3 → local migration once per multisite DB connection.
def migrate_all_from_s3
  RailsMultisite::ConnectionManagement.each_connection do
    migrate_from_s3
  end
end
|
2015-03-18 13:23:55 -04:00
|
|
|
|
2016-07-25 06:12:10 -04:00
|
|
|
# Downloads every S3-hosted upload referenced in post raw back to local
# storage, creates a local Upload record for it, rewrites the post's raw to
# the new URL and rebakes the post. Requires S3 uploads to be DISABLED (we
# are moving away from S3) but the bucket setting present (to build URLs).
def migrate_from_s3
  require "file_store/s3_store"

  # make sure S3 is disabled
  if SiteSetting.enable_s3_uploads
    puts "You must disable S3 uploads before running that task."
    return
  end

  # make sure S3 bucket is set
  if SiteSetting.s3_upload_bucket.blank?
    puts "The S3 upload bucket must be set before running that task."
    return
  end

  db = RailsMultisite::ConnectionManagement.current_db

  puts "Migrating uploads from S3 to local storage for '#{db}'..."

  s3_base_url = FileStore::S3Store.new.absolute_base_url

  Post.unscoped.find_each do |post|
    if post.raw[s3_base_url]
      # match "<base>/<upload id><40-hex sha1>.<ext>" occurrences in raw
      post.raw.scan(/(#{Regexp.escape(s3_base_url)}\/(\d+)(\h{40})\.\w+)/).each do |url, id, sha|
        begin
          puts "POST ID: #{post.id}"
          puts "UPLOAD ID: #{id}"
          puts "UPLOAD SHA: #{sha}"
          puts "UPLOAD URL: #{url}"
          if filename = guess_filename(url, post.raw)
            # FIX: interpolation was corrupted to "#(unknown)" — restore it
            puts "FILENAME: #{filename}"
            file = FileHelper.download(
              "http:#{url}",
              max_file_size: 20.megabytes,
              tmp_file_name: "from_s3",
              follow_redirect: true
            )
            # fall back to the system user (-1) for authorless posts
            if upload = UploadCreator.new(file, filename).create_for(post.user_id || -1)
              post.raw = post.raw.gsub(/(https?:)?#{Regexp.escape(url)}/, upload.url)
              post.save
              post.rebake!
              puts "OK :)"
            else
              puts "KO :("
            end
            puts post.full_url, ""
          else
            puts "NO FILENAME :("
          end
        rescue => e
          # keep going: one bad upload should not abort the whole migration
          puts "EXCEPTION: #{e.message}"
        end
      end
    end
  end

  puts "Done!"
end
|
2014-09-29 12:31:53 -04:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
################################################################################
|
|
|
|
# migrate_to_s3 #
|
|
|
|
################################################################################
|
|
|
|
|
|
|
|
# Push locally stored uploads to S3
# (one site when RAILS_DB is set, otherwise every site).
task "uploads:migrate_to_s3" => :environment do
  require "file_store/s3_store"
  require "file_store/local_store"
  require "db_helper"

  if ENV["RAILS_DB"]
    migrate_to_s3
  else
    migrate_to_s3_all_sites
  end
end
|
|
|
|
|
|
|
|
# Run the local → S3 migration once per multisite DB connection.
def migrate_to_s3_all_sites
  RailsMultisite::ConnectionManagement.each_connection do
    migrate_to_s3
  end
end
|
|
|
|
|
|
|
|
# Copies every locally stored upload (that has a sha1 and is not already on
# S3) to the configured S3 bucket, then remaps its URL throughout the DB.
# Prints "." per migrated upload and "X" for files that are missing locally
# or fail to store; blank-URL uploads are destroyed as invalid.
def migrate_to_s3
  # make sure s3 is enabled
  if !SiteSetting.enable_s3_uploads
    puts "You must enable s3 uploads before running that task"
    return
  end

  db = RailsMultisite::ConnectionManagement.current_db

  puts "Migrating uploads to S3 (#{SiteSetting.s3_upload_bucket}) for '#{db}'..."

  # will throw an exception if the bucket is missing
  s3 = FileStore::S3Store.new
  local = FileStore::LocalStore.new

  # Migrate all uploads
  Upload.where.not(sha1: nil)
    .where("url NOT LIKE '#{s3.absolute_base_url}%'")
    .find_each do |upload|
    # remove invalid uploads
    if upload.url.blank?
      upload.destroy!
      next
    end

    # store the old url
    from = upload.url
    # retrieve the path to the local file
    path = local.path_for(upload)

    # make sure the file exists locally
    if !path || !File.exist?(path)
      putc "X"
      next
    end

    begin
      file = File.open(path)
      # ask file(1) for the MIME type so S3 gets a correct Content-Type
      content_type = `file --mime-type -b #{path}`.strip
      to = s3.store_upload(file, upload, content_type)
    rescue
      putc "X"
      next
    ensure
      # FIX: the old `file.try(:close!) rescue nil` never closed anything —
      # File has no #close!, so ActiveSupport's try returned nil and the
      # handle leaked. Close it properly (file is nil if File.open raised).
      file&.close
    end

    # remap the URL
    DbHelper.remap(from, to)

    putc "."
  end
end
|
|
|
|
|
|
|
|
################################################################################
|
|
|
|
# clean_up #
|
|
|
|
################################################################################
|
|
|
|
|
2014-09-29 12:31:53 -04:00
|
|
|
# Remove orphaned upload records / files (one site or all sites).
task "uploads:clean_up" => :environment do
  ENV["RAILS_DB"] ? clean_up_uploads : RailsMultisite::ConnectionManagement.each_connection { clean_up_uploads }
end
|
2014-09-29 12:31:53 -04:00
|
|
|
|
2016-09-02 02:50:13 -04:00
|
|
|
# Destructive two-way reconciliation between the uploads tables and the
# file system (internal storage only):
#   1. DB vs FS — destroys Upload/OptimizedImage records whose file is gone;
#   2. FS vs DB — deletes files under public/uploads/<db> that no record
#      references (by sha1 or by url), then prunes empty directories.
# Offers to take a full backup first; aborts if the backup fails.
def clean_up_uploads
  db = RailsMultisite::ConnectionManagement.current_db

  puts "Cleaning up uploads and thumbnails for '#{db}'..."

  if Discourse.store.external?
    puts "This task only works for internal storages."
    exit 1
  end

  puts <<~OUTPUT
    This task will remove upload records and files permanently.

    Would you like to take a full backup before the clean up? (Y/N)
  OUTPUT

  if STDIN.gets.chomp.downcase == 'y'
    puts "Starting backup..."
    backuper = BackupRestore::Backuper.new(Discourse.system_user.id)
    backuper.run
    # a failed backup must not be followed by a destructive clean up
    exit 1 unless backuper.success
  end

  public_directory = Rails.root.join("public").to_s

  ##
  ## DATABASE vs FILE SYSTEM
  ##

  # uploads & avatars — "#" printed per destroyed record, "." per kept one
  Upload.find_each do |upload|
    path = File.join(public_directory, upload.url)

    if !File.exists?(path)
      upload.destroy!
      putc "#"
    else
      putc "."
    end
  end

  # optimized images
  OptimizedImage.find_each do |optimized_image|
    path = File.join(public_directory, optimized_image.url)

    if !File.exists?(path)
      optimized_image.destroy!
      putc "#"
    else
      putc "."
    end
  end

  ##
  ## FILE SYSTEM vs DATABASE
  ##

  uploads_directory = File.join(public_directory, 'uploads', db).to_s

  # avatars (no avatar should be stored in that old directory)
  FileUtils.rm_rf("#{uploads_directory}/avatars")

  # uploads and optimized images: delete any file on disk that no DB row
  # claims, matching either by content digest or by URL
  Dir.glob("#{uploads_directory}/**/*.*").each do |file_path|
    sha1 = Upload.generate_digest(file_path)
    url = file_path.split(public_directory, 2)[1]

    if (Upload.where(sha1: sha1).empty? &&
        Upload.where(url: url).empty?) &&
       (OptimizedImage.where(sha1: sha1).empty? &&
        OptimizedImage.where(url: url).empty?)

      FileUtils.rm(file_path)
      putc "#"
    else
      putc "."
    end
  end

  puts "Removing empty directories..."
  puts `find #{uploads_directory} -type d -empty -exec rmdir {} \\;`

  puts "Done!"
end
|
2015-05-10 20:30:22 -04:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
################################################################################
|
|
|
|
# missing #
|
|
|
|
################################################################################
|
2015-05-10 20:30:22 -04:00
|
|
|
|
|
|
|
# list all missing uploads and optimized images
|
|
|
|
# list all missing uploads and optimized images
task "uploads:missing" => :environment do
  ENV["RAILS_DB"] ? list_missing_uploads : RailsMultisite::ConnectionManagement.each_connection { list_missing_uploads }
end
|
2015-05-10 20:30:22 -04:00
|
|
|
|
2016-09-01 22:22:03 -04:00
|
|
|
# Prints the local path of every Upload and OptimizedImage whose file is
# missing or empty on disk. Internal storage only; remote (absolute) URLs
# are skipped since they have no local file to check.
def list_missing_uploads
  if Discourse.store.external?
    puts "This task only works for internal storages."
    return
  end

  public_directory = "#{Rails.root}/public"

  # Both models carry a `url` column and are checked the same way, so run
  # one loop over both instead of duplicating the body (the original had
  # two identical copies).
  [Upload, OptimizedImage].each do |model|
    model.find_each do |record|
      # could be a remote image — only relative "/..." URLs are local files
      next unless record.url =~ /^\/[^\/]/

      path = "#{public_directory}#{record.url}"

      bad = true
      begin
        bad = false if File.size(path) != 0
      rescue
        # something is messed up (file missing, unreadable, ...)
      end
      puts path if bad
    end
  end
end
|
2015-05-11 06:59:50 -04:00
|
|
|
|
2016-09-21 04:50:08 -04:00
|
|
|
################################################################################
|
|
|
|
# Recover from tombstone #
|
|
|
|
################################################################################
|
|
|
|
|
|
|
|
# Restore deleted uploads from the tombstone directory (one site or all sites).
task "uploads:recover_from_tombstone" => :environment do
  ENV["RAILS_DB"] ? recover_from_tombstone : RailsMultisite::ConnectionManagement.each_connection { recover_from_tombstone }
end
|
|
|
|
|
|
|
|
# Walks every file in uploads/tombstone/<db>, finds posts still referencing
# a same-named /uploads/... image with no matching Upload record, re-creates
# the upload from the tombstoned file and remaps + rebakes those posts.
# Temporarily raises max_image_size_kb so large originals can be recreated;
# the ensure block always restores the original setting.
def recover_from_tombstone
  if Discourse.store.external?
    puts "This task only works for internal storages."
    return
  end

  begin
    original_setting = SiteSetting.max_image_size_kb
    SiteSetting.max_image_size_kb = 10240
    current_db = RailsMultisite::ConnectionManagement.current_db

    public_path = Rails.root.join("public")
    paths = Dir.glob(File.join(public_path, 'uploads', 'tombstone', current_db, '**', '*.*'))
    max = paths.length

    paths.each_with_index do |path, index|
      filename = File.basename(path)
      # progress line: "    n / total ( pct%)"
      printf("%9d / %d (%5.1f%%)\n", (index + 1), max, (((index + 1).to_f / max.to_f) * 100).round(1))

      # FIX: the LIKE pattern was corrupted to "%#(unknown)%" — it must
      # interpolate the tombstoned file's basename to find referencing posts.
      Post.where("raw LIKE ?", "%#{filename}%").each do |post|
        doc = Nokogiri::HTML::fragment(post.raw)
        updated = false

        doc.css("img[src]").each do |img|
          url = img["src"]

          next unless url =~ /^\/uploads\//

          upload = Upload.find_by(url: url)

          # only restore images whose Upload record is gone
          if !upload && url
            printf "Restoring #{path}..."
            tombstone_path = File.join(public_path, 'uploads', 'tombstone', url.gsub(/^\/uploads\//, ""))

            if File.exist?(tombstone_path)
              File.open(tombstone_path) do |file|
                new_upload = UploadCreator.new(file, File.basename(url)).create_for(Discourse::SYSTEM_USER_ID)

                if new_upload.persisted?
                  printf "Restored into #{new_upload.url}\n"
                  DbHelper.remap(url, new_upload.url)
                  updated = true
                else
                  puts "Failed to create upload for #{url}: #{new_upload.errors.full_messages}."
                end
              end
            else
              puts "Failed to find file (#{tombstone_path}) in tombstone."
            end
          end
        end

        post.rebake! if updated
      end
    end
  ensure
    SiteSetting.max_image_size_kb = original_setting
  end
end
|
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
################################################################################
|
|
|
|
# regenerate_missing_optimized #
|
|
|
|
################################################################################
|
|
|
|
|
2015-05-11 06:59:50 -04:00
|
|
|
# regenerate missing optimized images
|
|
|
|
# regenerate missing optimized images
task "uploads:regenerate_missing_optimized" => :environment do
  ENV["RAILS_DB"] ? regenerate_missing_optimized : RailsMultisite::ConnectionManagement.each_connection { regenerate_missing_optimized }
end
|
|
|
|
|
|
|
|
# Regenerates any OptimizedImage thumbnail whose file is missing or empty on
# disk (internal storage only). If the original upload's file is also gone
# but it has a known origin URL, tries to re-download it first. Prints "#"
# per regenerated thumbnail, "X" per unrecoverable original, "." per OK one,
# then lists the unrecoverable originals.
def regenerate_missing_optimized
  db = RailsMultisite::ConnectionManagement.current_db

  puts "Regenerating missing optimized images for '#{db}'..."

  if Discourse.store.external?
    puts "This task only works for internal storages."
    return
  end

  public_directory = "#{Rails.root}/public"
  missing_uploads = Set.new

  avatar_upload_ids = UserAvatar.all.pluck(:custom_upload_id, :gravatar_upload_id).flatten.compact

  default_scope = OptimizedImage.includes(:upload)

  # Two passes: avatar thumbnails unconditionally, everything else only when
  # it has a URL and positive dimensions.
  [
    default_scope
      .where("optimized_images.upload_id IN (?)", avatar_upload_ids),

    default_scope
      .where("optimized_images.upload_id NOT IN (?)", avatar_upload_ids)
      .where("LENGTH(COALESCE(url, '')) > 0")
      .where("width > 0 AND height > 0")
  ].each do |scope|
    scope.find_each do |optimized_image|
      upload = optimized_image.upload

      # skip remote URLs — only relative "/..." paths are local files
      next unless optimized_image.url =~ /^\/[^\/]/
      next unless upload.url =~ /^\/[^\/]/

      thumbnail = "#{public_directory}#{optimized_image.url}"
      original = "#{public_directory}#{upload.url}"

      if !File.exists?(thumbnail) || File.size(thumbnail) <= 0
        # make sure the original image exists locally
        if (!File.exists?(original) || File.size(original) <= 0) && upload.origin.present?
          # try to fix it by redownloading it
          begin
            downloaded = FileHelper.download(
              upload.origin,
              max_file_size: SiteSetting.max_image_size_kb.kilobytes,
              tmp_file_name: "discourse-missing",
              follow_redirect: true
            ) rescue nil
            if downloaded && downloaded.size > 0
              FileUtils.mkdir_p(File.dirname(original))
              File.open(original, "wb") { |f| f.write(downloaded.read) }
            end
          ensure
            # downloaded is a Tempfile when the fetch succeeded; close! deletes it
            downloaded.try(:close!) if downloaded.respond_to?(:close!)
          end
        end

        if File.exists?(original) && File.size(original) > 0
          FileUtils.mkdir_p(File.dirname(thumbnail))
          OptimizedImage.resize(original, thumbnail, optimized_image.width, optimized_image.height)
          putc "#"
        else
          missing_uploads << original
          putc "X"
        end
      else
        putc "."
      end
    end
  end

  puts "", "Done"

  if missing_uploads.size > 0
    puts "Missing uploads:"
    missing_uploads.sort.each { |u| puts u }
  end
end
|
2015-05-19 06:31:51 -04:00
|
|
|
|
2015-05-25 11:59:00 -04:00
|
|
|
################################################################################
|
2015-06-12 06:02:36 -04:00
|
|
|
# migrate_to_new_scheme #
|
2015-05-25 11:59:00 -04:00
|
|
|
################################################################################
|
|
|
|
|
2015-06-12 06:02:36 -04:00
|
|
|
# Enables the site setting that makes the app migrate uploads to the new
# directory scheme in the background.
task "uploads:start_migration" => :environment do
  SiteSetting.migrate_to_new_scheme = true
  puts "Migration started!"
end
|
|
|
|
|
2015-06-12 06:02:36 -04:00
|
|
|
# Disables the background migration of uploads to the new directory scheme.
task "uploads:stop_migration" => :environment do
  SiteSetting.migrate_to_new_scheme = false
  # FIX: typo in the user-facing message ("stoped" -> "stopped")
  puts "Migration stopped!"
end
|
2016-09-01 03:19:14 -04:00
|
|
|
|
|
|
|
# Disk-usage report for the current site's uploads: per-extension counts and
# sizes, DB record counts, and the top users by total upload size.
# args:
#   cache_path — reuse a previously generated "<size> <path>" listing file
#   limit      — rows in the "most disk space" table (default 10)
task "uploads:analyze", [:cache_path, :limit] => :environment do |_, args|
  now = Time.zone.now
  current_db = RailsMultisite::ConnectionManagement.current_db

  puts "Analyzing uploads for '#{current_db}'... This may take awhile...\n"
  cache_path = args[:cache_path]

  uploads_path = Rails.root.join('public', 'uploads', current_db)

  path =
    if cache_path
      cache_path
    else
      path = "/tmp/#{current_db}-#{now.to_i}-paths.txt"
      # FIX: previously touched "/tmp/#{now.to_i}-paths.txt", a different
      # file than the one `find` writes below — touch the real one.
      FileUtils.touch(path)
      # one "<size in bytes> <full path>" line per file
      `find #{uploads_path} -type f -printf '%s %h/%f\n' > #{path}`
      path
    end

  extensions = {}
  paths_count = 0

  File.readlines(path).each do |line|
    size, file_path = line.split(" ", 2)

    paths_count += 1
    extension = File.extname(file_path).chomp.downcase
    extensions[extension] ||= {}
    extensions[extension]["count"] ||= 0
    extensions[extension]["count"] += 1
    extensions[extension]["size"] ||= 0
    extensions[extension]["size"] += size.to_i
  end

  uploads_count = Upload.count
  optimized_images_count = OptimizedImage.count

  puts <<~REPORT
    Report for '#{current_db}'
    -----------#{'-' * current_db.length}
    Number of `Upload` records in DB: #{uploads_count}
    Number of `OptimizedImage` records in DB: #{optimized_images_count}
    **Total DB records: #{uploads_count + optimized_images_count}**

    Number of images in uploads folder: #{paths_count}
    ------------------------------------#{'-' * paths_count.to_s.length}

  REPORT

  # anonymous class just to borrow number_to_human_size
  helper = Class.new do
    include ActionView::Helpers::NumberHelper
  end

  helper = helper.new

  printf "%-15s | %-15s | %-15s\n", 'extname', 'total size', 'count'
  puts "-" * 45

  extensions.sort_by { |_, value| value['size'] }.reverse.each do |extname, value|
    printf "%-15s | %-15s | %-15s\n", extname, helper.number_to_human_size(value['size']), value['count']
  end

  puts "\n"

  limit = args[:limit] || 10

  sql = <<~SQL
    SELECT
      users.username,
      COUNT(uploads.user_id) AS num_of_uploads,
      SUM(uploads.filesize) AS total_size_of_uploads,
      COUNT(optimized_images.id) AS num_of_optimized_images
    FROM users
    INNER JOIN uploads ON users.id = uploads.user_id
    INNER JOIN optimized_images ON uploads.id = optimized_images.upload_id
    GROUP BY users.id
    ORDER BY total_size_of_uploads DESC
    LIMIT #{limit}
  SQL

  puts "Users using the most disk space"
  puts "-------------------------------\n"
  printf "%-25s | %-25s | %-25s | %-25s\n", 'username', 'total size of uploads', 'number of uploads', 'number of optimized images'
  puts "-" * 110

  User.exec_sql(sql).values.each do |username, num_of_uploads, total_size_of_uploads, num_of_optimized_images|
    printf "%-25s | %-25s | %-25s | %-25s\n", username, helper.number_to_human_size(total_size_of_uploads), num_of_uploads, num_of_optimized_images
  end

  puts "\n"
  puts "List of file paths @ #{path}"
  puts "Duration: #{Time.zone.now - now} seconds"
end
|