# Post processing that we can do after a post has already been cooked.
# For example, inserting the onebox content, or image sizes/thumbnails.

require_dependency 'url_helper'
require_dependency 'pretty_text'
require_dependency 'quote_comparer'

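# A rough usage sketch (illustrative only; in Discourse the processor is driven
# by the post-processing background job, and the persistence step shown here is
# an assumption, not part of this class):
#
#   cpp = CookedPostProcessor.new(post, invalidate_oneboxes: true)
#   cpp.post_process
#   post.update_column(:cooked, cpp.html) if cpp.dirty?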
class CookedPostProcessor
  include ActionView::Helpers::NumberHelper

  attr_reader :cooking_options, :doc

  def initialize(post, opts = {})
    @dirty = false
    @opts = opts
    @post = post
    @previous_cooked = (@post.cooked || "").dup
    # NOTE: we re-cook the post here in order to prevent timing issues with edits
    # cf. https://meta.discourse.org/t/edit-of-rebaked-post-doesnt-show-in-html-only-in-raw/33815/6
    @cooking_options = post.cooking_options || opts[:cooking_options] || {}
    @cooking_options[:topic_id] = post.topic_id
    @cooking_options = @cooking_options.symbolize_keys
    @cooking_options[:omit_nofollow] = true if post.omit_nofollow?
    @cooking_options[:cook_method] = post.cook_method

    analyzer = post.post_analyzer
    @doc = Nokogiri::HTML::fragment(analyzer.cook(post.raw, @cooking_options))
    @has_oneboxes = analyzer.found_oneboxes?
    @size_cache = {}
  end

  def post_process(bypass_bump = false)
    DistributedMutex.synchronize("post_process_#{@post.id}") do
      DiscourseEvent.trigger(:before_post_process_cooked, @doc, @post)
      post_process_oneboxes
      post_process_images
      post_process_quotes
      keep_reverse_index_up_to_date
      optimize_urls
      update_post_image
      enforce_nofollow
      pull_hotlinked_images(bypass_bump)
      grant_badges
      DiscourseEvent.trigger(:post_process_cooked, @doc, @post)
      nil
    end
  end

  def has_emoji?
    (@doc.css("img.emoji") - @doc.css(".quote img")).size > 0
  end

  def grant_badges
    return unless Guardian.new.can_see?(@post)

    BadgeGranter.grant(Badge.find(Badge::FirstEmoji), @post.user, post_id: @post.id) if has_emoji?
    BadgeGranter.grant(Badge.find(Badge::FirstOnebox), @post.user, post_id: @post.id) if @has_oneboxes
    BadgeGranter.grant(Badge.find(Badge::FirstReplyByEmail), @post.user, post_id: @post.id) if @post.is_reply_by_email?
  end

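  # Rebuilds the post_uploads reverse index: every upload referenced by an
  # <a href> or <img src> in the cooked HTML (plus any upload ids recorded in
  # the downloaded_images custom field) gets a row in post_uploads for this post.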
  def keep_reverse_index_up_to_date
    upload_ids = []

    @doc.css("a/@href", "img/@src").each do |media|
      if upload = Upload.get_from_url(media.value)
        upload_ids << upload.id
      end
    end

    upload_ids |= downloaded_images.values.select { |id| Upload.exists?(id) }

    values = upload_ids.map { |u| "(#{@post.id},#{u})" }.join(",")
    PostUpload.transaction do
      PostUpload.where(post_id: @post.id).delete_all
      if upload_ids.size > 0
        PostUpload.exec_sql("INSERT INTO post_uploads (post_id, upload_id) VALUES #{values}")
      end
    end
  end

  def post_process_images
    extract_images.each do |img|
      src = img["src"].sub(/^https?:/i, "")

      if large_images.include?(src)
        add_large_image_placeholder!(img)
      elsif broken_images.include?(src)
        add_broken_image_placeholder!(img)
      else
        limit_size!(img)
        convert_to_link!(img)
      end
    end
  end

  def post_process_quotes
    @doc.css("aside.quote").each do |q|
      post_number = q['data-post']
      topic_id = q['data-topic']

      if topic_id && post_number
        comparer = QuoteComparer.new(
          topic_id.to_i,
          post_number.to_i,
          q.css('blockquote').text
        )

        if comparer.modified?
          q['class'] = ((q['class'] || '') + " quote-modified").strip
        end
      end
    end
  end

  def add_large_image_placeholder!(img)
    url = img["src"]

    is_hyperlinked = is_a_hyperlink?(img)

    placeholder = create_node("div", "large-image-placeholder")
    img.add_next_sibling(placeholder)
    placeholder.add_child(img)

    a = create_link_node(nil, url, true)
    img.add_next_sibling(a)

    span = create_span_node("url", url)
    a.add_child(span)
    span.add_previous_sibling(create_icon_node("image"))
    span.add_next_sibling(create_span_node("help", I18n.t("upload.placeholders.too_large", max_size_kb: SiteSetting.max_image_size_kb)))

    # Only if the image is already linked
    if is_hyperlinked
      parent = placeholder.parent
      parent.add_next_sibling(placeholder)

      if parent.name == 'a' && parent["href"].present?
        if url == parent["href"]
          parent.remove
        else
          parent["class"] = "link"
          a.add_previous_sibling(parent)

          lspan = create_span_node("url", parent["href"])
          parent.add_child(lspan)
          lspan.add_previous_sibling(create_icon_node("link"))
        end
      end
    end

    img.remove
  end

  def add_broken_image_placeholder!(img)
    img.name = "span"
    img.set_attribute("class", "broken-image fa fa-chain-broken")
    img.set_attribute("title", I18n.t("post.image_placeholder.broken"))
    img.remove_attribute("src")
    img.remove_attribute("width")
    img.remove_attribute("height")
  end

  def large_images
    @large_images ||= JSON.parse(@post.custom_fields[Post::LARGE_IMAGES].presence || "[]") rescue []
  end

  def broken_images
    @broken_images ||= JSON.parse(@post.custom_fields[Post::BROKEN_IMAGES].presence || "[]") rescue []
  end

  def downloaded_images
    @downloaded_images ||= JSON.parse(@post.custom_fields[Post::DOWNLOADED_IMAGES].presence || "{}") rescue {}
  end

  def extract_images
    # all images with a src attribute
    @doc.css("img[src]") -
      # minus data images
      @doc.css("img[src^='data']") -
      # minus emojis
      @doc.css("img.emoji") -
      # minus oneboxed images
      oneboxed_images -
      # minus images inside quotes
      @doc.css(".quote img")
  end

  def extract_images_for_post
    # all images with a src attribute
    @doc.css("img[src]") -
      # minus emojis
      @doc.css("img.emoji") -
      # minus images inside quotes
      @doc.css(".quote img")
  end

  def oneboxed_images
    @doc.css(".onebox-body img, .onebox img")
  end

  def limit_size!(img)
    # retrieve the size from
    # 1) the width/height attributes
    # 2) the dimension from the preview (image_sizes)
    # 3) the dimension of the original image (HTTP request)
    w, h = get_size_from_attributes(img) ||
           get_size_from_image_sizes(img["src"], @opts[:image_sizes]) ||
           get_size(img["src"])

    # limit the size of the thumbnail
    img["width"], img["height"] = ImageSizer.resize(w, h)
  end

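  # Worked example (assuming the original image is 1000x500): an <img> with only
  # width="200" gives a ratio of 0.2, so this returns [200, 100]; with only
  # height="100" it also returns [200, 100].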
  def get_size_from_attributes(img)
    w, h = img["width"].to_i, img["height"].to_i
    return [w, h] unless w <= 0 || h <= 0
    # if only width or height are specified attempt to scale image
    if w > 0 || h > 0
      w = w.to_f
      h = h.to_f

      return unless original_image_size = get_size(img["src"])
      original_width, original_height = original_image_size.map(&:to_f)

      if w > 0
        ratio = w / original_width
        [w.floor, (original_height * ratio).floor]
      else
        ratio = h / original_height
        [(original_width * ratio).floor, h.floor]
      end
    end
  end

  def get_size_from_image_sizes(src, image_sizes)
    return unless image_sizes.present?
    image_sizes.each do |image_size|
      url, size = image_size[0], image_size[1]
      if url && url.include?(src) &&
         size && size["width"].to_i > 0 && size["height"].to_i > 0
        return [size["width"], size["height"]]
      end
    end
  end

  def get_size(url)
    return @size_cache[url] if @size_cache.has_key?(url)

    absolute_url = url
    absolute_url = Discourse.base_url_no_prefix + absolute_url if absolute_url =~ /^\/[^\/]/

    return unless absolute_url

    # FastImage fails when there's no scheme
    absolute_url = SiteSetting.scheme + ":" + absolute_url if absolute_url.start_with?("//")

    return unless is_valid_image_url?(absolute_url)

    # we can *always* crawl our own images
    return unless SiteSetting.crawl_images? || Discourse.store.has_been_uploaded?(url)

    @size_cache[url] = FastImage.size(absolute_url)
  rescue Zlib::BufError, URI::InvalidURIError, URI::InvalidComponentError
    # FastImage.size raises BufError for some gifs, leave it.
  end

  def is_valid_image_url?(url)
    uri = URI.parse(url)
    %w(http https).include? uri.scheme
  rescue URI::InvalidURIError
  end

  # only crop when the image is taller than 18:9
  # we only use 90% of that to allow for a small margin
  MIN_RATIO_TO_CROP ||= (9.0 / 18.0) * 0.9
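  # MIN_RATIO_TO_CROP evaluates to 0.45: for example, a 400x1000 image has a
  # width/height ratio of 0.4 < 0.45 and gets cropped, while a 500x1000 image
  # (ratio 0.5) is left alone.
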
  def convert_to_link!(img)
    src = img["src"]
    return if src.blank? || is_a_hyperlink?(img)

    width, height = img["width"].to_i, img["height"].to_i
    # TODO: store original dimensions in db
    original_width, original_height = (get_size(src) || [0, 0]).map(&:to_i)

    # can't reach the image...
    if original_width == 0 || original_height == 0
      Rails.logger.info "Can't reach '#{src}' to get its dimensions."
      return
    end

    return if original_width <= width && original_height <= height
    return if original_width <= SiteSetting.max_image_width && original_height <= SiteSetting.max_image_height

    if crop = (original_width.to_f / original_height.to_f < MIN_RATIO_TO_CROP)
      width, height = ImageSizer.crop(original_width, original_height)
      img["width"] = width
      img["height"] = height
    end

    if upload = Upload.get_from_url(src)
      upload.create_thumbnail!(width, height, crop)
    end

    add_lightbox!(img, original_width, original_height, upload)
  end

  def is_a_hyperlink?(img)
    parent = img.parent
    while parent
      return true if parent.name == "a"
      parent = parent.parent if parent.respond_to?(:parent)
    end
    false
  end

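  # Roughly the markup this produces (a sketch; exact classes and attributes
  # come from the helpers below, and the sample values are hypothetical):
  #
  #   <div class="lightbox-wrapper">
  #     <a class="lightbox" href="original.png" title="photo.jpg" data-download-href="...">
  #       <img src="thumbnail.png" width="690" height="460">
  #       <div class="meta">
  #         <span class="filename">photo.jpg</span>
  #         <span class="informations">1024x768 123 KB</span>
  #         <span class="expand"></span>
  #       </div>
  #     </a>
  #   </div>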
  def add_lightbox!(img, original_width, original_height, upload = nil)
    # first, create a div to hold our lightbox
    lightbox = create_node("div", "lightbox-wrapper")
    img.add_next_sibling(lightbox)
    lightbox.add_child(img)

    # then, the link to our larger image
    a = create_link_node("lightbox", img["src"])
    img.add_next_sibling(a)

    if upload && Discourse.store.internal?
      a["data-download-href"] = Discourse.store.download_url(upload)
    end

    a.add_child(img)

    # replace the image with its thumbnail
    w, h = img["width"].to_i, img["height"].to_i
    img["src"] = upload.thumbnail(w, h).url if upload && upload.has_thumbnail?(w, h)

    # then, some overlay information
    meta = create_node("div", "meta")
    img.add_next_sibling(meta)

    filename = get_filename(upload, img["src"])
    informations = "#{original_width}x#{original_height}"
    informations << " #{number_to_human_size(upload.filesize)}" if upload

    a["title"] = CGI.escapeHTML(img["title"] || filename)

    meta.add_child create_span_node("filename", a["title"])
    meta.add_child create_span_node("informations", informations)
    meta.add_child create_span_node("expand")
  end

  def get_filename(upload, src)
    return File.basename(src) unless upload
    return upload.original_filename unless upload.original_filename =~ /^blob(\.png)?$/i
    return I18n.t("upload.pasted_image_filename")
  end

  def create_node(tag_name, klass)
    node = Nokogiri::XML::Node.new(tag_name, @doc)
    node["class"] = klass if klass.present?
    node
  end

  def create_span_node(klass, content = nil)
    span = create_node("span", klass)
    span.content = content if content
    span
  end

  def create_icon_node(klass)
    create_node("i", "fa fa-fw fa-#{klass}")
  end

  def create_link_node(klass, url, external = false)
    a = create_node("a", klass)
    a["href"] = url
    if external
      a["target"] = "_blank"
      a["rel"] = "nofollow noopener"
    end
    a
  end

  def update_post_image
    img = extract_images_for_post.first
    return if img.blank?

    if img["src"].present?
      @post.update_column(:image_url, img["src"][0...255]) # post
      @post.topic.update_column(:image_url, img["src"][0...255]) if @post.is_first_post? # topic
    end
  end

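  # Onebox-body images that are at least 64px wide or tall lose their explicit
  # width/height and get wrapped in an aspect-ratio container, roughly
  # (a sketch based on the string built below; 690/388 is an example):
  #
  #   <div class="aspect-image" style="--aspect-ratio:690/388;">
  #     <img src="...">
  #   </div>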
  def post_process_oneboxes
    Oneboxer.apply(@doc) do |url|
      @has_oneboxes = true
      Oneboxer.onebox(url,
        invalidate_oneboxes: !!@opts[:invalidate_oneboxes],
        user_id: @post&.user_id,
        category_id: @post&.topic&.category_id
      )
    end

    oneboxed_images.each do |img|
      next if img["src"].blank?

      src = img["src"].sub(/^https?:/i, "")

      if large_images.include?(src) || broken_images.include?(src)
        img.remove
        next
      end

      upload_id = downloaded_images[src]
      upload = Upload.find(upload_id) if upload_id
      img["src"] = upload.url if upload.present?

      # make sure we grab dimensions for oneboxed images
      # and wrap in a div
      limit_size!(img)

      next if img["class"]&.include?('onebox-avatar')

      parent_class = img.parent && img.parent["class"]
      width = img["width"].to_i
      height = img["height"].to_i

      if parent_class&.include?("onebox-body") && width > 0 && height > 0
        # special instruction for width == height, assume we are dealing with an avatar
        if (img["width"].to_i == img["height"].to_i)
          found = false
          parent = img
          while parent = parent.parent
            if parent["class"] && parent["class"].include?("whitelistedgeneric")
              found = true
              break
            end
          end

          if found
            img["class"] = img["class"].to_s + " onebox-avatar"
            next
          end
        end

        if width < 64 && height < 64
          img["class"] = img["class"].to_s + " onebox-full-image"
        else
          img.delete('width')
          img.delete('height')
          new_parent = img.add_next_sibling("<div class='aspect-image' style='--aspect-ratio:#{width}/#{height};'/>")
          new_parent.first.add_child(img)
        end
      elsif (parent_class&.include?("instagram-images") || parent_class&.include?("tweet-images")) && width > 0 && height > 0
        img.remove_attribute("width")
        img.remove_attribute("height")
        img.parent["class"] = "aspect-image-full-size"
        img.parent["style"] = "--aspect-ratio:#{width}/#{height};"
      end
    end

    if @cooking_options[:omit_nofollow] || !SiteSetting.add_rel_nofollow_to_user_content
      @doc.css(".onebox-body a, .onebox a").each { |a| a.remove_attribute("rel") }
    end
  end

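  # Example of the rewriting below for a locally hosted image (the CDN host is
  # illustrative): "/uploads/default/original/1X/abc.png" becomes the
  # protocol-relative "//cdn.example.com/uploads/default/original/1X/abc.png".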
  def optimize_urls
    # attachments can't be on the CDN when either setting is enabled
    if SiteSetting.login_required || SiteSetting.prevent_anons_from_downloading_files
      @doc.css("a.attachment[href]").each do |a|
        href = a["href"].to_s
        a["href"] = UrlHelper.schemaless UrlHelper.absolute_without_cdn(href) if UrlHelper.is_local(href)
      end
    end

    use_s3_cdn = SiteSetting.Upload.enable_s3_uploads && SiteSetting.Upload.s3_cdn_url.present?

    %w{href data-download-href}.each do |selector|
      @doc.css("a[#{selector}]").each do |a|
        href = a[selector].to_s
        a[selector] = UrlHelper.schemaless UrlHelper.absolute(href) if UrlHelper.is_local(href)
        a[selector] = Discourse.store.cdn_url(a[selector]) if use_s3_cdn
      end
    end

    @doc.css("img[src]").each do |img|
      src = img["src"].to_s
      img["src"] = UrlHelper.schemaless UrlHelper.absolute(src) if UrlHelper.is_local(src)
      img["src"] = Discourse.store.cdn_url(img["src"]) if use_s3_cdn
    end
  end

  def enforce_nofollow
    if !@cooking_options[:omit_nofollow] && SiteSetting.add_rel_nofollow_to_user_content
      PrettyText.add_rel_nofollow_to_user_content(@doc)
    end
  end

  def pull_hotlinked_images(bypass_bump = false)
    # is the job enabled?
    return unless SiteSetting.download_remote_images_to_local?
    # do we have enough disk space?
    return if disable_if_low_on_disk_space
    # don't download remote images for posts that are more than n days old
    return unless @post.created_at > (Date.today - SiteSetting.download_remote_images_max_days_old)
    # we only want to run the job when the post was changed by a user
    return if @post.last_editor_id && @post.last_editor_id <= 0
    # make sure no other job is scheduled
    Jobs.cancel_scheduled_job(:pull_hotlinked_images, post_id: @post.id)
    # schedule the job
    delay = SiteSetting.editing_grace_period + 1
    Jobs.enqueue_in(delay.seconds.to_i, :pull_hotlinked_images, post_id: @post.id, bypass_bump: bypass_bump)
  end

  def disable_if_low_on_disk_space
    return false if available_disk_space >= SiteSetting.download_remote_images_threshold

    SiteSetting.download_remote_images_to_local = false

    # log the site setting change
    reason = I18n.t("disable_remote_images_download_reason")
    staff_action_logger = StaffActionLogger.new(Discourse.system_user)
    staff_action_logger.log_site_setting_change("download_remote_images_to_local", true, false, details: reason)

    # also send a private message to the site contact user
    notify_about_low_disk_space

    true
  end

  def notify_about_low_disk_space
    SystemMessage.create_from_system_user(Discourse.site_contact_user, :download_remote_images_disabled)
  end

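  # `df -P` reports the used-capacity column as a percentage (e.g. "42%"); the
  # pipeline below extracts that field, so this returns the percentage of free
  # disk space on the uploads volume (100 - 42 = 58 in that example).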
  def available_disk_space
    100 - `df -P #{Rails.root}/public/uploads | tail -1 | tr -s ' ' | cut -d ' ' -f 5`.to_i
  end

  def dirty?
    @previous_cooked != html
  end

  def html
    @doc.try(:to_html)
  end

end