2013-11-20 07:10:08 -05:00
|
|
|
require_dependency 'url_helper'
|
2014-04-14 16:55:57 -04:00
|
|
|
require_dependency 'file_helper'
|
2017-05-10 18:16:57 -04:00
|
|
|
require_dependency 'upload_creator'
|
2013-11-20 07:10:08 -05:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
module Jobs
|
|
|
|
|
|
|
|
class PullHotlinkedImages < Jobs::Base
|
2016-04-06 22:56:43 -04:00
|
|
|
sidekiq_options queue: 'low'
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Cache the site-wide maximum image size, converted from KB to bytes,
# so download/size checks don't re-read the setting per image.
def initialize
  @max_size = SiteSetting.max_image_size_kb * 1024
end
|
|
|
|
|
2017-10-11 17:11:44 -04:00
|
|
|
# Fetch the remote image at +src+ into a temp file, retrying up to
# 3 times (with a 1s pause between attempts) on any error.
#
# Returns the downloaded file handle, or nil when every attempt failed
# (errors are deliberately swallowed — a broken hotlink is expected).
def download(src)
  attempts = 3

  begin
    FileHelper.download(
      src,
      max_file_size: @max_size,
      tmp_file_name: "discourse-hotlinked",
      follow_redirect: true
    )
  rescue
    if (attempts -= 1) > 0
      sleep 1
      retry
    end
    # fall through: begin/rescue evaluates to nil on final failure
  end
end
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
# Pull every hotlinked (remote) image referenced by a post down to local
# storage, then rewrite the post's raw markup to point at the new uploads.
#
# args: { post_id: Integer } — required; raises Discourse::InvalidParameters
#       when missing. Silently no-ops when the setting is disabled or the
#       post has vanished.
#
# Side effects: creates Upload records, updates the post's custom fields
# (LARGE_IMAGES / BROKEN_IMAGES / DOWNLOADED_IMAGES), and either revises
# the post (as the system user, without bumping) or re-triggers post
# processing.
def execute(args)
  return unless SiteSetting.download_remote_images_to_local?

  post_id = args[:post_id]
  raise Discourse::InvalidParameters.new(:post_id) unless post_id.present?

  post = Post.find_by(id: post_id)
  return unless post.present?

  raw = post.raw.dup
  # keep a pristine copy so we can detect a concurrent edit before revising
  start_raw = raw.dup

  downloaded_urls = {}

  # previously-recorded outcomes, stored as JSON in custom fields;
  # a parse failure just resets the bookkeeping
  large_images = JSON.parse(post.custom_fields[Post::LARGE_IMAGES].presence || "[]") rescue []
  broken_images = JSON.parse(post.custom_fields[Post::BROKEN_IMAGES].presence || "[]") rescue []
  downloaded_images = JSON.parse(post.custom_fields[Post::DOWNLOADED_IMAGES].presence || "{}") rescue {}

  has_new_large_image  = false
  has_new_broken_image = false
  has_downloaded_image = false

  extract_images_from(post.cooked).each do |image|
    src = original_src = image['src']
    # protocol-relative URLs get an explicit scheme before downloading
    src = "#{SiteSetting.force_https ? "https" : "http"}:#{src}" if src.start_with?("//")

    if is_valid_image_url(src)
      begin
        # have we already downloaded that file?
        unless downloaded_images.include?(src) || large_images.include?(src) || broken_images.include?(src)
          if hotlinked = download(src)
            if File.size(hotlinked.path) <= @max_size
              filename = File.basename(URI.parse(src).path)
              filename << File.extname(hotlinked.path) unless filename["."]
              upload = UploadCreator.new(hotlinked, filename, origin: src).create_for(post.user_id)
              if upload.persisted?
                downloaded_urls[src] = upload.url
                # keyed without the scheme so http/https variants collapse
                downloaded_images[src.sub(/^https?:/i, "")] = upload.id
                has_downloaded_image = true
              else
                log(:info, "Failed to pull hotlinked image for post: #{post_id}: #{src} - #{upload.errors.full_messages.join("\n")}")
              end
            else
              large_images << original_src.sub(/^https?:/i, "")
              has_new_large_image = true
            end
          else
            broken_images << original_src.sub(/^https?:/i, "")
            has_new_broken_image = true
          end
        end
        # have we successfully downloaded that file?
        if downloaded_urls[src].present?
          url = downloaded_urls[src]
          escaped_src = Regexp.escape(original_src)
          # there are 6 ways to insert an image in a post
          # HTML tag - <img src="http://...">
          raw.gsub!(/src=["']#{escaped_src}["']/i, "src='#{url}'")
          # BBCode tag - [img]http://...[/img]
          raw.gsub!(/\[img\]#{escaped_src}\[\/img\]/i, "[img]#{url}[/img]")
          # Markdown linked image - [![alt](http://...)](http://...)
          raw.gsub!(/\[!\[([^\]]*)\]\(#{escaped_src}\)\]/) { "[<img src='#{url}' alt='#{$1}'>]" }
          # Markdown inline - ![alt](http://...)
          raw.gsub!(/!\[([^\]]*)\]\(#{escaped_src}\)/) { "![#{$1}](#{url})" }
          # Markdown inline - ![](http://... "image title")
          raw.gsub!(/!\[\]\(#{escaped_src} "([^\]]*)"\)/) { "![](#{url})" }
          # Markdown inline - ![alt](http://... "image title")
          # FIX: keep the alt text ($1) instead of dropping it
          raw.gsub!(/!\[([^\]]*)\]\(#{escaped_src} "([^\]]*)"\)/) { "![#{$1}](#{url})" }
          # Markdown reference - [x]: http://
          raw.gsub!(/\[([^\]]+)\]:\s?#{escaped_src}/) { "[#{$1}]: #{url}" }
          # Direct link
          raw.gsub!(/^#{escaped_src}(\s?)$/) { "<img src='#{url}'>#{$1}" }
        end
      rescue => e
        log(:error, "Failed to pull hotlinked image (#{src}) post: #{post_id}\n" + e.message + "\n" + e.backtrace.join("\n"))
      end
    end
  end

  large_images.uniq!
  broken_images.uniq!

  post.custom_fields[Post::LARGE_IMAGES] = large_images.to_json if large_images.present?
  post.custom_fields[Post::BROKEN_IMAGES] = broken_images.to_json if broken_images.present?
  post.custom_fields[Post::DOWNLOADED_IMAGES] = downloaded_images.to_json if downloaded_images.present?
  # only save custom fields if there are any
  post.save_custom_fields if large_images.present? || broken_images.present? || downloaded_images.present?

  post.reload

  if start_raw == post.raw && raw != post.raw
    # the post wasn't edited while we worked, and we changed something:
    # revise it as the system user without bumping the topic
    changes = { raw: raw, edit_reason: I18n.t("upload.edit_reason") }
    post.revise(Discourse.system_user, changes, bypass_bump: true)
  elsif has_downloaded_image || has_new_large_image || has_new_broken_image
    post.trigger_post_process(true)
  end
end
|
|
|
|
|
|
|
|
# Collect every <img> with a src attribute from the cooked HTML,
# excluding avatar images (which are never hotlinked content).
def extract_images_from(html)
  fragment = Nokogiri::HTML::fragment(html)
  fragment.css("img[src]") - fragment.css("img.avatar")
end
|
|
|
|
|
|
|
|
# Decide whether +src+ is a remote image URL worth downloading.
# Rejects blank values, already-local uploads, relative paths, unparsable
# URLs, anything served from our own CDN/S3-CDN or main domain, and
# finally defers to the site's download whitelist/blacklist.
def is_valid_image_url(src)
  # make sure we actually have a url
  return false unless src.present?

  # we don't want to pull uploaded images
  return false if Discourse.store.has_been_uploaded?(src)

  # we don't want to pull relative images
  return false if src =~ /\A\/[^\/]/i

  # parse the src
  begin
    parsed = URI.parse(src)
  rescue URI::InvalidURIError
    return false
  end

  hostname = parsed.hostname
  return false unless hostname

  # we don't want to pull images hosted on the CDN (if we use one)
  cdn_urls = [Discourse.asset_host, SiteSetting.Upload.s3_cdn_url]
  return false if cdn_urls.any? { |cdn| cdn.present? && URI.parse(cdn).hostname == hostname }

  # we don't want to pull images hosted on the main domain
  return false if URI.parse(Discourse.base_url_no_prefix).hostname == hostname

  # check the domains blacklist
  SiteSetting.should_download_images?(src)
end
|
|
|
|
|
2017-07-04 21:34:24 -04:00
|
|
|
# Write +message+ to the Rails logger at +log_level+, prefixed with the
# current multisite database name so multi-tenant logs stay attributable.
def log(log_level, message)
  prefixed = "#{RailsMultisite::ConnectionManagement.current_db}: #{message}"
  Rails.logger.public_send(log_level, prefixed)
end
|
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
end
|
|
|
|
|
|
|
|
end
|