2013-11-20 07:10:08 -05:00
|
|
|
require_dependency 'url_helper'
|
2014-04-14 16:55:57 -04:00
|
|
|
require_dependency 'file_helper'
|
2013-11-20 07:10:08 -05:00
|
|
|
|
2013-11-05 13:04:47 -05:00
|
|
|
module Jobs
|
|
|
|
|
|
|
|
class PullHotlinkedImages < Jobs::Base
|
2013-11-20 07:10:08 -05:00
|
|
|
include UrlHelper
|
2013-11-05 13:04:47 -05:00
|
|
|
|
|
|
|
def initialize
  # maximum size of the file in bytes
  # (site setting is expressed in KB; ActiveSupport's Numeric#kilobytes converts it)
  @max_size = SiteSetting.max_image_size_kb.kilobytes
end
|
|
|
|
|
|
|
|
# Downloads every hotlinked (remote) image referenced by a post, uploads it
# locally, and revises the post's raw markup to point at the local copies.
#
# args:
#   :post_id - required, id of the post to process
#   :backoff - internal retry counter, set when the job re-enqueues itself
#
# Raises Discourse::InvalidParameters when :post_id is missing.
def execute(args)
  return unless SiteSetting.download_remote_images_to_local?

  post_id = args[:post_id]
  raise Discourse::InvalidParameters.new(:post_id) unless post_id.present?

  post = Post.find_by(id: post_id)
  return unless post.present?

  raw = post.raw.dup
  # snapshot so we can detect a concurrent edit while we were downloading
  start_raw = raw.dup
  downloaded_urls = {}

  extract_images_from(post.cooked).each do |image|
    src = image['src']
    # protocol-relative URLs can't be downloaded as-is
    src = "http:" + src if src.start_with?("//")

    if is_valid_image_url(src)
      hotlinked = nil
      begin
        # have we already downloaded that file?
        unless downloaded_urls.include?(src)
          begin
            hotlinked = FileHelper.download(src, @max_size, "discourse-hotlinked")
          rescue Discourse::InvalidParameters
            # unparseable/unsupported URL - treat as a failed download
          end
          # FIX: previously `hotlinked.try(:size) <= @max_size`, which raised
          # NoMethodError (nil <= Integer) whenever the download failed and
          # logged a misleading "bigger than" message for nil downloads.
          if hotlinked
            if hotlinked.size <= @max_size
              filename = File.basename(URI.parse(src).path)
              upload = Upload.create_for(post.user_id, hotlinked, filename, hotlinked.size, { origin: src })
              downloaded_urls[src] = upload.url
            else
              Rails.logger.error("Failed to pull hotlinked image: #{src} - Image is bigger than #{@max_size}")
            end
          else
            Rails.logger.error("Failed to pull hotlinked image: #{src} - Download failed")
          end
        end
        # have we successfully downloaded that file?
        if downloaded_urls[src].present?
          url = downloaded_urls[src]
          escaped_src = Regexp.escape(src)
          # there are 6 ways to insert an image in a post
          # HTML tag - <img src="http://...">
          raw.gsub!(/src=["']#{escaped_src}["']/i, "src='#{url}'")
          # BBCode tag - [img]http://...[/img]
          raw.gsub!(/\[img\]#{escaped_src}\[\/img\]/i, "[img]#{url}[/img]")
          # Markdown linked image - [![alt](http://...)](http://...)
          raw.gsub!(/\[!\[([^\]]*)\]\(#{escaped_src}\)\]/) { "[<img src='#{url}' alt='#{$1}'>]" }
          # Markdown inline - ![alt](http://...)
          raw.gsub!(/!\[([^\]]*)\]\(#{escaped_src}\)/) { "![#{$1}](#{url})" }
          # Markdown reference - [x]: http://
          raw.gsub!(/\[(\d+)\]: #{escaped_src}/) { "[#{$1}]: #{url}" }
          # Direct link
          raw.gsub!(src, "<img src='#{url}'>")
        end
      rescue => e
        Rails.logger.error("Failed to pull hotlinked image: #{src}\n" + e.message + "\n" + e.backtrace.join("\n"))
      ensure
        # close & delete the temp file
        hotlinked && hotlinked.close!
      end
    end
  end

  post.reload

  if start_raw != post.raw
    # post was edited - start over (after 10 minutes)
    backoff = args.fetch(:backoff, 1) + 1
    # FIX: previously multiplied by args[:backoff], which is nil on the first
    # retry (TypeError: nil can't be coerced into Integer); use the local.
    delay = SiteSetting.ninja_edit_window * backoff
    Jobs.enqueue_in(delay.seconds.to_i, :pull_hotlinked_images, args.merge!(backoff: backoff))
  elsif raw != post.raw
    options = {
      edit_reason: I18n.t("upload.edit_reason"),
      bypass_bump: true # we never want that job to bump the topic
    }
    post.revise(Discourse.system_user, raw, options)
  end
end
|
|
|
|
|
|
|
|
# Returns every <img> node found in the cooked HTML, excluding images that
# belong to onebox previews and user avatars (we never want to re-host those).
def extract_images_from(html)
  fragment = Nokogiri::HTML::fragment(html)
  all_images = fragment.css("img")
  onebox_images = fragment.css(".onebox-result img")
  avatar_images = fragment.css("img.avatar")
  all_images - onebox_images - avatar_images
end
|
|
|
|
|
|
|
|
# Decides whether `src` is a remote image URL we should attempt to download.
# Rejects blank values, already-uploaded files, unparseable URIs, and anything
# hosted on our own CDN or main domain; finally defers to the site's
# download-images policy (domain blacklist).
def is_valid_image_url(src)
  # make sure we actually have a url
  return false if src.blank?

  # we don't want to pull uploaded images
  return false if Discourse.store.has_been_uploaded?(src)

  # parse the src
  begin
    parsed = URI.parse(src)
  rescue URI::InvalidURIError
    return false
  end

  # we don't want to pull images hosted on the CDN (if we use one)
  if Discourse.asset_host.present?
    return false if URI.parse(Discourse.asset_host).hostname == parsed.hostname
  end

  # we don't want to pull images hosted on the main domain
  return false if URI.parse(Discourse.base_url_no_prefix).hostname == parsed.hostname

  # check the domains blacklist
  SiteSetting.should_download_images?(src)
end
|
|
|
|
|
|
|
|
end
|
|
|
|
|
|
|
|
end
|