# frozen_string_literal: true

class InlineUploads
  PLACEHOLDER = "__replace__"
  PATH_PLACEHOLDER = "__replace_path__"

  UPLOAD_REGEXP_PATTERN = "/original/(\\dX/(?:\\h/)*\\h{40}[a-zA-Z0-9.]*)(\\?v=\\d+)?"
  private_constant :UPLOAD_REGEXP_PATTERN
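
  # Rewrites every upload reference in +markdown+ (raw URLs, <img>/<a> tags,
  # BBCode [img] and markdown image/link/reference syntax) to the canonical
  # upload:// short-url form and returns the new markdown. `on_missing`, when
  # given, is called with each link that no longer resolves to an Upload.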
  def self.process(markdown, on_missing: nil)
    markdown = markdown.dup

    match_md_reference(markdown) do |match, src, replacement, index|
      if upload = Upload.get_from_url(src)
        markdown = markdown.sub(match, replacement.sub!(PATH_PLACEHOLDER, "__#{upload.sha1}__"))
      end
    end
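
    # Reference definitions are tagged with the upload's sha1 up front; those
    # placeholders are swapped for the uploads' short paths at the end of this
    # method.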
    cooked_fragment = Nokogiri::HTML5.fragment(PrettyText.cook(markdown, disable_emojis: true))
    link_occurrences = []

    cooked_fragment.traverse do |node|
      if node.name == "img"
        # Do nothing
      elsif !(
            node.children.count == 1 &&
              (node.children[0].name != "img" && node.children[0].children.blank?)
          ) &&
            !(
              node.name == "a" && node.children.count > 1 &&
                !node_children_names(node).include?("img")
            )
        next
      end

      if seen_link = matched_uploads(node).first
        if (actual_link = (node.attributes["href"]&.value || node.attributes["src"]&.value))
          link_occurrences << { link: actual_link, is_valid: true }
        elsif node.name != "p"
          link_occurrences << { link: seen_link, is_valid: false }
        end
      end
    end
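
    # link_occurrences now lists, in document order, every upload link found in
    # the cooked HTML and whether the node carried a usable href/src attribute.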
    raw_matches = []

    match_bbcode_img(markdown) do |match, src, replacement, index|
      raw_matches << [match, src, replacement, index]
    end

    match_md_inline_img(markdown) do |match, src, replacement, index|
      raw_matches << [match, src, replacement, index]
    end

    match_img(markdown) do |match, src, replacement, index|
      raw_matches << [match, src, replacement, index]
    end

    match_anchor(markdown) do |match, href, replacement, index|
      raw_matches << [match, href, replacement, index]
    end
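
    # Also collect bare upload URLs (local, plus S3/CDN when the store is
    # external) that appear outside image/anchor markup.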
    regexps = [
      %r{(https?://[a-zA-Z0-9\./-]+/#{Discourse.store.upload_path}#{UPLOAD_REGEXP_PATTERN})},
    ]

    if Discourse.store.external?
      regexps << /((?:https?:)?#{SiteSetting.Upload.s3_base_url}#{UPLOAD_REGEXP_PATTERN})/
      regexps << /(#{SiteSetting.Upload.s3_cdn_url}#{UPLOAD_REGEXP_PATTERN})/
    end
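
    # A URL alone in its own paragraph becomes a markdown image (for supported
    # image extensions) or an absolute link to the upload's short path; other
    # bare occurrences become short-path links. Reference definitions are
    # skipped here because match_md_reference already handled them.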
    regexps.each do |regexp|
      indexes = Set.new

      markdown.scan(/(\n{2,}|\A)#{regexp}$/) do |match|
        if match[1].present? && match[2].present?
          extension = match[2].split(".")[-1].downcase
          index = $~.offset(2)[0]
          indexes << index

          if FileHelper.supported_images.include?(extension)
            raw_matches << [match[1], match[1], +"![](#{PLACEHOLDER})", index]
          else
            raw_matches << [match[1], match[1], +"#{Discourse.base_url}#{PATH_PLACEHOLDER}", index]
          end
        end
      end

      markdown.scan(/^#{regexp}(\s)/) do |match|
        if match[0].present?
          index = $~.offset(0)[0]
          next if !indexes.add?(index)

          raw_matches << [match[0], match[0], +"#{Discourse.base_url}#{PATH_PLACEHOLDER}", index]
        end
      end

      markdown.scan(/\[[^\[\]]*\]: #{regexp}/) do |match|
        indexes.add($~.offset(1)[0]) if match[0].present?
      end

      markdown.scan(/(([\n\s\)\]\<])+)#{regexp}/) do |match|
        if matched_uploads(match[2]).present?
          next if !indexes.add?($~.offset(3)[0])

          index = $~.offset(0)[0]
          raw_matches << [match[2], match[2], +"#{Discourse.base_url}#{PATH_PLACEHOLDER}", index]
        end
      end
    end
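
    # Pair each raw match, in order of appearance, with its occurrence in the
    # cooked HTML and rewrite the ones that resolved to a valid upload link.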
    raw_matches
      .sort { |a, b| a[3] <=> b[3] }
      .each do |match, link, replace_with, _index|
        node_info = link_occurrences.shift
        next unless node_info&.dig(:is_valid)

        if link.include?(node_info[:link])
          begin
            uri = URI(link)
          rescue URI::Error
          end

          if !Discourse.store.external?
            host = uri&.host

            hosts = [Discourse.current_hostname]

            if cdn_url = GlobalSetting.cdn_url
              hosts << URI(GlobalSetting.cdn_url).hostname
            end

            next if host && !hosts.include?(host)
          end

          upload = Upload.get_from_url(link)

          if upload
            replace_with.sub!(PLACEHOLDER, upload.short_url)
            replace_with.sub!(PATH_PLACEHOLDER, upload.short_path)
            markdown.sub!(match, replace_with)
          else
            on_missing.call(link) if on_missing
          end
        end
      end
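
    # Resolve the sha1 placeholders left by the reference-definition pass above.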
    markdown.scan(/(__(\h{40})__)/) do |match|
      upload = Upload.find_by(sha1: match[1])
      markdown = markdown.sub(match[0], upload.short_path)
    end

    markdown
  end
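
  # Yields markdown inline images and links, e.g. ![alt](url "title"), whose
  # URL points at an upload (or any URL when external_src is true), together
  # with a replacement string containing PLACEHOLDER and the match offset.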
  def self.match_md_inline_img(markdown, external_src: false)
    markdown.scan(/(!?\[([^\[\]]*)\]\(([^\s\)]+)([ ]*['"]{1}[^\)]*['"]{1}[ ]*)?\))/) do |match|
      if (external_src || matched_uploads(match[2]).present?) && block_given?
        yield(
          match[0],
          match[2],
          +"#{match[0].start_with?("!") ? "!" : ""}[#{match[1]}](#{PLACEHOLDER}#{match[3]})",
          $~.offset(0)[0]
        )
      end
    end
  end
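
  # Yields BBCode image tags, e.g. [img]url[/img], whose URL points at an
  # upload (or any URL when external_src is true).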
  def self.match_bbcode_img(markdown, external_src: false)
    markdown.scan(%r{(\[img\]\s*([^\[\]\s]+)\s*\[/img\])}i) do |match|
      if (external_src || matched_uploads(match[1]).present?) && block_given?
        yield(match[0], match[1], +"![](#{PLACEHOLDER})", $~.offset(0)[0])
      end
    end
  end
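
  # Yields markdown reference definitions, e.g. [label]: url, whose URL points
  # at an upload.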
  def self.match_md_reference(markdown)
    markdown.scan(/(\[([^\]]+)\]:([ ]+)(\S+))/) do |match|
      if match[3] && matched_uploads(match[3]).present? && block_given?
        yield(
          match[0],
          match[3],
          +"[#{match[1]}]:#{match[2]}#{Discourse.base_url}#{PATH_PLACEHOLDER}",
          $~.offset(0)[0]
        )
      end
    end
  end
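
  # Yields HTML anchors, e.g. <a href="url">text</a>, whose href points at an
  # upload (or any href when external_href is true); anchors carrying a class
  # attribute are treated as attachments.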
  def self.match_anchor(markdown, external_href: false)
    markdown.scan(%r{((<a[^<]+>)([^<\a>]*?)</a>)}i) do |match|
      node = Nokogiri::HTML5.fragment(match[0]).children[0]
      href = node.attributes["href"]&.value

      if href && (external_href || matched_uploads(href).present?)
        has_attachment = node.attributes["class"]&.value
        index = $~.offset(0)[0]
        text = match[2].strip.gsub("\n", "").gsub(/ +/, " ")
        text = "#{text}|attachment" if has_attachment

        yield(match[0], href, +"[#{text}](#{PLACEHOLDER})", index) if block_given?
      end
    end
  end
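
  # Yields HTML <img> tags whose src points at an upload (or any src when
  # external_src is true); the replacement keeps the tag but swaps the src for
  # the upload's short URL or PLACEHOLDER.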
  def self.match_img(markdown, external_src: false, uploads: nil)
    markdown.scan(%r{(<(?!img)[^<>]+/?>)?(\s*)(<img [^>\n]+>)}i) do |match|
      node = Nokogiri::HTML5.fragment(match[2].strip).children[0]
      src = node&.attributes&.[]("src")&.value

      if src && (external_src || matched_uploads(src).present?)
        upload = uploads&.[](src)
        node["src"] = upload&.short_url || PLACEHOLDER

        spaces_before = match[1].present? ? match[1][/ +$/].size : 0
        replacement = +"#{" " * spaces_before}#{node.to_s}"

        yield(match[2], src, replacement, $~.offset(0)[0]) if block_given?
      end
    end
  end
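
  # Rewrites every hotlinked image reference in +raw+ using the Upload returned
  # by the block for each matched source URL; sources the block maps to nil are
  # left untouched. A minimal usage sketch (downloaded_uploads is a hypothetical
  # src => Upload lookup supplied by the caller):
  #
  #   InlineUploads.replace_hotlinked_image_urls(raw: post.raw) do |src|
  #     downloaded_uploads[src]
  #   end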
  def self.replace_hotlinked_image_urls(raw:, &blk)
    replace =
      Proc.new do |match, match_src, replacement, _index|
        upload = blk.call(match_src)
        next if !upload

        replacement =
          if replacement.include?(InlineUploads::PLACEHOLDER)
            replacement.sub(InlineUploads::PLACEHOLDER, upload.short_url)
          elsif replacement.include?(InlineUploads::PATH_PLACEHOLDER)
            replacement.sub(InlineUploads::PATH_PLACEHOLDER, upload.short_path)
          end

        raw = raw.gsub(match, replacement)
      end

    # there are 6 ways to insert an image in a post
    # HTML tag - <img src="http://...">
    InlineUploads.match_img(raw, external_src: true, &replace)

    # BBCode tag - [img]http://...[/img]
    InlineUploads.match_bbcode_img(raw, external_src: true, &replace)

    # Markdown linked image - [![alt](http://...)](http://...)
    # Markdown inline - ![alt](http://...)
    # Markdown inline - ![](http://... "image title")
    # Markdown inline - ![alt](http://... "image title")
    InlineUploads.match_md_inline_img(raw, external_src: true, &replace)

    # Bare image URL on its own line - http://...
    raw =
      raw.gsub(%r{^(https?://\S+)(\s?)$}) do |match|
        if upload = blk.call(match)
          "![](#{upload.short_url})"
        else
          match
        end
      end

    raw
  end
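
  # Returns the upload URLs found in +node+ (coerced to a string): upload://
  # short URLs, /uploads/short-url/ paths, and absolute local, CDN or S3 upload
  # URLs, depending on how the store is configured.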
  def self.matched_uploads(node)
    upload_path = Discourse.store.upload_path
    base_url = Discourse.base_url.sub(%r{https?://}, "(https?://)")

    regexps = [
      %r{(upload://([a-zA-Z0-9]+)[a-zA-Z0-9\.]*)},
      %r{(/uploads/short-url/([a-zA-Z0-9]+)[a-zA-Z0-9\.]*)},
      %r{(#{base_url}/uploads/short-url/([a-zA-Z0-9]+)[a-zA-Z0-9\.]*)},
      %r{(#{GlobalSetting.relative_url_root}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})},
      %r{(#{base_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})},
    ]

    if GlobalSetting.cdn_url && (cdn_url = GlobalSetting.cdn_url.sub(%r{https?://}, "(https?://)"))
      regexps << %r{(#{cdn_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}

      if GlobalSetting.relative_url_root.present?
        regexps << %r{(#{cdn_url}#{GlobalSetting.relative_url_root}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}
      end
    end

    if Discourse.store.external?
      if Rails.configuration.multisite
        regexps << %r{((https?:)?#{SiteSetting.Upload.s3_base_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}
        regexps << %r{(#{SiteSetting.Upload.s3_cdn_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}
      else
        regexps << /((https?:)?#{SiteSetting.Upload.s3_base_url}#{UPLOAD_REGEXP_PATTERN})/
        regexps << /(#{SiteSetting.Upload.s3_cdn_url}#{UPLOAD_REGEXP_PATTERN})/
      end
    end

    matches = []
    node = node.to_s

    regexps.each do |regexp|
      node.scan(/(^|[\n\s"'\(>])#{regexp}($|[\n\s"'\)<])/) { |matched| matches << matched[1] }
    end

    matches
  end
  private_class_method :matched_uploads
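
  # Recursively collects the tag names of all leaf nodes under +node+.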
  def self.node_children_names(node, names = Set.new)
    if node.children.blank?
      names << node.name
      return names
    end

    node.children.each { |child| names = node_children_names(child, names) }

    names
  end
  private_class_method :node_children_names
end