discourse/app/jobs/regular/pull_hotlinked_images.rb

require_dependency 'url_helper'
require_dependency 'file_helper'

module Jobs

  class PullHotlinkedImages < Jobs::Base

    include UrlHelper

    def initialize
      # maximum size of the file in bytes
      @max_size = SiteSetting.max_image_size_kb.kilobytes
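      # For example (hypothetical setting value, not necessarily the default):
      # a max_image_size_kb of 2048 gives @max_size = 2048 * 1024 = 2_097_152 bytes.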
    end

    def execute(args)
      return unless SiteSetting.download_remote_images_to_local?

      post_id = args[:post_id]
      raise Discourse::InvalidParameters.new(:post_id) unless post_id.present?

      post = Post.find_by(id: post_id)
      return unless post.present?

      raw = post.raw.dup
      start_raw = raw.dup
      downloaded_urls = {}

      extract_images_from(post.cooked).each do |image|
        src = image['src']
        src = "http:" + src if src.start_with?("//")

        if is_valid_image_url(src)
          hotlinked = nil
          begin
            # have we already downloaded that file?
            unless downloaded_urls.include?(src)
              begin
                hotlinked = FileHelper.download(src, @max_size, "discourse-hotlinked")
              rescue Discourse::InvalidParameters
              end
              # the download may have failed or returned nothing, so guard against nil
              if hotlinked && hotlinked.size <= @max_size
                filename = File.basename(URI.parse(src).path)
                upload = Upload.create_for(post.user_id, hotlinked, filename, hotlinked.size, { origin: src })
                downloaded_urls[src] = upload.url
              else
                Rails.logger.error("Failed to pull hotlinked image: #{src} - Image could not be downloaded or is bigger than #{@max_size} bytes")
              end
            end

            # have we successfully downloaded that file?
            if downloaded_urls[src].present?
              url = downloaded_urls[src]
              escaped_src = Regexp.escape(src)
              # there are 6 ways to insert an image in a post
              # HTML tag - <img src="http://...">
              raw.gsub!(/src=["']#{escaped_src}["']/i, "src='#{url}'")
              # BBCode tag - [img]http://...[/img]
              raw.gsub!(/\[img\]#{escaped_src}\[\/img\]/i, "[img]#{url}[/img]")
              # Markdown linked image - [![alt](http://...)](http://...)
              raw.gsub!(/\[!\[([^\]]*)\]\(#{escaped_src}\)\]/) { "[<img src='#{url}' alt='#{$1}'>]" }
              # Markdown inline - ![alt](http://...)
              raw.gsub!(/!\[([^\]]*)\]\(#{escaped_src}\)/) { "![#{$1}](#{url})" }
              # Markdown reference - [x]: http://
              raw.gsub!(/\[(\d+)\]: #{escaped_src}/) { "[#{$1}]: #{url}" }
              # Direct link
              raw.gsub!(src, "<img src='#{url}'>")
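              # As a sketch with made-up URLs: the Markdown inline form
              #   ![logo](http://example.com/logo.png)
              # would be rewritten to something like
              #   ![logo](/uploads/default/123/logo.png)
              # using whatever local URL upload.url returned above.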
            end
          rescue => e
            Rails.logger.error("Failed to pull hotlinked image: #{src}\n" + e.message + "\n" + e.backtrace.join("\n"))
          ensure
            # close & delete the temp file
            hotlinked && hotlinked.close!
          end
        end
      end

      post.reload

      if start_raw != post.raw
        # post was edited - start over (after 10 minutes)
        backoff = args.fetch(:backoff, 1) + 1
        delay = SiteSetting.ninja_edit_window * backoff
        Jobs.enqueue_in(delay.seconds.to_i, :pull_hotlinked_images, args.merge!(backoff: backoff))
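        # Worked example, assuming the default 5 minute ninja_edit_window:
        # the first reschedule uses backoff = 2, i.e. roughly 10 minutes,
        # then 15, then 20, and so on for each further edit that is detected.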
      elsif raw != post.raw
        options = {
          edit_reason: I18n.t("upload.edit_reason"),
          bypass_bump: true # we never want that job to bump the topic
        }
        post.revise(Discourse.system_user, raw, options)
      end
    end
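
    # Collects the <img> nodes worth considering from the cooked HTML, skipping
    # onebox thumbnails and user avatars. For example (hypothetical markup),
    # given <img src="http://example.com/pic.png"> next to
    # <img class="avatar" src="...">, only the first node is returned.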
    def extract_images_from(html)
      doc = Nokogiri::HTML::fragment(html)
      doc.css("img") - doc.css(".onebox-result img") - doc.css("img.avatar")
    end
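
    # For example (hypothetical hosts): with the forum served from
    # http://forum.example.com, "http://i.imgur.com/pic.jpg" would be pulled
    # (unless its domain is blacklisted), while
    # "http://forum.example.com/uploads/pic.jpg" would be skipped.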
    def is_valid_image_url(src)
      # make sure we actually have a url
      return false unless src.present?
      # we don't want to pull uploaded images
      return false if Discourse.store.has_been_uploaded?(src)
      # parse the src
      begin
        uri = URI.parse(src)
      rescue URI::InvalidURIError
        return false
      end
      # we don't want to pull images hosted on the CDN (if we use one)
      return false if Discourse.asset_host.present? && URI.parse(Discourse.asset_host).hostname == uri.hostname
      # we don't want to pull images hosted on the main domain
      return false if URI.parse(Discourse.base_url_no_prefix).hostname == uri.hostname
      # check the domains blacklist
      SiteSetting.should_download_images?(src)
    end

  end
end
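
# Usage sketch (not from this file; the real call site and its extra arguments
# may differ between versions): Discourse enqueues this job with the id of the
# post whose cooked HTML may contain hotlinked images, along the lines of
#
#   Jobs.enqueue(:pull_hotlinked_images, post_id: post.id)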