2019-05-02 18:17:27 -04:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2013-12-11 21:41:34 -05:00
|
|
|
class DiscourseDiff
|
|
|
|
MAX_DIFFERENCE = 200
|
|
|
|
|
|
|
|
# Captures the two revisions to compare.
#
# @param before [String] the original content
# @param after [String] the revised content
def initialize(before, after)
  @before, @after = before, after
end
|
|
|
|
|
|
|
|
# Renders the diff between @before and @after as one merged HTML stream:
# common blocks appear once, deleted material is tagged with "del"
# classes/tags and inserted material with "ins".
#
# @return [String] the merged markup wrapped in <div class="inline-diff">
def inline_html
  i = 0
  inline = []
  while i < block_by_block_diff.size
    op_code = block_by_block_diff[i][1]
    if op_code == :common
      inline << block_by_block_diff[i][0]
    else
      # "first" is always the before-side index and "second" the after-side,
      # regardless of whether the delete or the add comes first in the diff.
      if op_code == :delete
        opposite_op_code = :add
        klass = "del"
        first = i
        second = i + 1
      else
        opposite_op_code = :delete
        klass = "ins"
        first = i + 1
        second = i
      end

      if i + 1 < block_by_block_diff.size && block_by_block_diff[i + 1][1] == opposite_op_code
        # Adjacent delete/add pair: the same block changed, so diff its
        # tokens to highlight only the words that differ.
        diff =
          ONPDiff.new(
            tokenize_html(block_by_block_diff[first][0]),
            tokenize_html(block_by_block_diff[second][0]),
          ).diff
        inline << generate_inline_html(diff)
        i += 1 # the pair consumed two diff entries; skip the partner
      else
        # Lone delete/add: mark the whole block as deleted or inserted.
        inline << add_class_or_wrap_in_tags(block_by_block_diff[i][0], klass)
      end
    end
    i += 1
  end

  "<div class=\"inline-diff\">#{inline.join}</div>"
end
|
|
|
|
|
|
|
|
# Renders the diff as two parallel HTML columns: the left column shows
# @before with deletions highlighted, the right shows @after with
# insertions highlighted; common blocks appear in both.
#
# @return [String] two sibling <div class="revision-content"> elements
def side_by_side_html
  i = 0
  left, right = [], []
  while i < block_by_block_diff.size
    op_code = block_by_block_diff[i][1]
    if op_code == :common
      left << block_by_block_diff[i][0]
      right << block_by_block_diff[i][0]
    else
      # "side" is the column a lone change lands in; "first"/"second"
      # keep the before/after order stable for the token-level diff below.
      if op_code == :delete
        opposite_op_code = :add
        side = left
        klass = "del"
        first = i
        second = i + 1
      else
        opposite_op_code = :delete
        side = right
        klass = "ins"
        first = i + 1
        second = i
      end

      if i + 1 < block_by_block_diff.size && block_by_block_diff[i + 1][1] == opposite_op_code
        # Adjacent delete/add pair: word-diff the two versions of the block
        # and distribute the result to the matching column.
        diff =
          ONPDiff.new(
            tokenize_html(block_by_block_diff[first][0]),
            tokenize_html(block_by_block_diff[second][0]),
          ).diff
        deleted, inserted = generate_side_by_side_html(diff)
        left << deleted
        right << inserted
        i += 1 # the pair consumed two diff entries; skip the partner
      else
        # Lone delete/add: the whole block goes to one column only.
        side << add_class_or_wrap_in_tags(block_by_block_diff[i][0], klass)
      end
    end
    i += 1
  end

  "<div class=\"revision-content\">#{left.join}</div><div class=\"revision-content\">#{right.join}</div>"
end
|
|
|
|
|
2013-12-16 12:11:46 -05:00
|
|
|
# Renders the raw-markdown diff as a two-column HTML table, one row per
# diffed line: left cell is the before side, right cell the after side.
#
# @return [String] a <table class="markdown"> element
def side_by_side_markdown
  i = 0
  table = ["<table class=\"markdown\">"]
  while i < line_by_line_diff.size
    table << "<tr>"
    op_code = line_by_line_diff[i][1]
    if op_code == :common
      # Unchanged line: same content in both cells.
      table << "<td>#{line_by_line_diff[i][0]}</td>"
      table << "<td>#{line_by_line_diff[i][0]}</td>"
    else
      # "first" is always the before-side index, "second" the after-side.
      if op_code == :delete
        opposite_op_code = :add
        first = i
        second = i + 1
      else
        opposite_op_code = :delete
        first = i + 1
        second = i
      end

      if i + 1 < line_by_line_diff.size && line_by_line_diff[i + 1][1] == opposite_op_code
        # Adjacent delete/add pair: diff the two lines word by word.
        before_tokens, after_tokens =
          tokenize_markdown(line_by_line_diff[first][0]),
          tokenize_markdown(line_by_line_diff[second][0])
        # When the token counts diverge too much, a word-level diff is both
        # expensive and unreadable — fall back to whole-line tokens.
        if (before_tokens.size - after_tokens.size).abs > MAX_DIFFERENCE
          before_tokens, after_tokens =
            tokenize_line(line_by_line_diff[first][0]),
            tokenize_line(line_by_line_diff[second][0])
        end
        diff = ONPDiff.new(before_tokens, after_tokens).short_diff
        deleted, inserted = generate_side_by_side_markdown(diff)
        table << "<td class=\"diff-del\">#{deleted.join}</td>"
        table << "<td class=\"diff-ins\">#{inserted.join}</td>"
        i += 1 # the pair consumed two diff entries; skip the partner
      else
        # Lone delete/add: fill one cell, leave the other empty.
        if op_code == :delete
          table << "<td class=\"diff-del\">#{line_by_line_diff[i][0]}</td>"
          table << "<td></td>"
        else
          table << "<td></td>"
          table << "<td class=\"diff-ins\">#{line_by_line_diff[i][0]}</td>"
        end
      end
    end
    table << "</tr>"
    i += 1
  end
  table << "</table>"

  table.join
end
|
|
|
|
|
|
|
|
private
|
|
|
|
|
2024-11-30 10:30:30 -05:00
|
|
|
# Memoized paragraph-level diff: each revision is split into its top-level
# HTML blocks, then the two block lists are diffed against each other.
def block_by_block_diff
  @block_by_block_diff ||=
    ONPDiff.new(
      tokenize_html_blocks(@before),
      tokenize_html_blocks(@after),
    ).paragraph_diff
end
|
|
|
|
|
|
|
|
# Memoized line-level diff of the HTML-escaped raw markdown of the two
# revisions.
def line_by_line_diff
  @line_by_line_diff ||=
    ONPDiff.new(
      tokenize_line(CGI.escapeHTML(@before)),
      tokenize_line(CGI.escapeHTML(@after)),
    ).short_diff
end
|
|
|
|
|
2013-12-11 21:41:34 -05:00
|
|
|
# Splits text into line tokens. A token is a run of non-newline characters
# plus any trailing newline characters, so blank lines stay attached to the
# line that precedes them.
def tokenize_line(text)
  line_with_trailing_newlines = /[^\r\n]+[\r\n]*/
  text.scan(line_with_trailing_newlines)
end
|
|
|
|
|
2013-12-16 12:11:46 -05:00
|
|
|
# Splits markdown text into word-level tokens: a word keeps its trailing
# run of spaces/tabs attached ("hello "), while every other character
# (punctuation, newlines, leading spaces) becomes its own token.
def tokenize_markdown(text)
  tokens = []
  buffer = []
  i = 0
  while i < text.size
    char = text[i]
    if char =~ /\w/
      buffer << char
    elsif char =~ /[ \t]/ && buffer.join =~ /\A\w+\z/
      # A word just ended: absorb the whole run of spaces/tabs into it.
      while i < text.size && text[i] =~ /[ \t]/
        buffer << text[i]
        i += 1
      end
      i -= 1
      tokens << buffer.join
      buffer = []
    else
      # Non-word character: flush any pending word, then emit the
      # character as a token of its own.
      tokens << buffer.join unless buffer.empty?
      tokens << char
      buffer = []
    end
    i += 1
  end
  tokens << buffer.join unless buffer.empty?
  tokens
end
|
|
|
|
|
|
|
|
# Splits an HTML fragment into its top-level element blocks, each
# re-serialized to an HTML string.
def tokenize_html_blocks(html)
  fragment = Nokogiri::HTML5.fragment(html)
  fragment.search("./*").map(&:to_html)
end
|
|
|
|
|
|
|
|
# Splits an HTML string into a flat stream of tag/word tokens via the
# SAX-based HtmlTokenizer below.
def tokenize_html(html)
  HtmlTokenizer.tokenize(html)
end
|
|
|
|
|
|
|
|
# Marks a token as deleted or inserted. If the token is an HTML tag, a
# "diff-del"/"diff-ins" class is added to (or merged into) its opening tag;
# plain text is wrapped in <del>/<ins> tags instead.
#
# @param html_or_text [String] a single token (tag or text)
# @param klass [String] "del" or "ins"
# @return [String] the marked-up token
def add_class_or_wrap_in_tags(html_or_text, klass)
  result = html_or_text.dup
  index_of_next_chevron = result.index(">")
  if result.size > 0 && result[0] == "<" && index_of_next_chevron
    index_of_class = result.index("class=")
    if index_of_class.nil? || index_of_class > index_of_next_chevron
      # we do not have a class for the current tag
      # add it right before the ">"
      result.insert(index_of_next_chevron, " class=\"diff-#{klass}\"")
    else
      # we have a class attribute: prepend diff-#{klass} unless present.
      # Non-greedy match up to the matching quote so we never capture text
      # from other attributes (`[^\1]` was an octal escape, not a
      # backreference, and matched greedily across attribute boundaries);
      # /m keeps multi-line attribute values covered.
      classes = result[/class=(["'])(.*?)\1/m, 2] || ""
      if classes.include?("diff-#{klass}")
        result
      else
        result.insert(index_of_class + "class=".size + 1, "diff-#{klass} ")
      end
    end
  else
    "<#{klass}>#{result}</#{klass}>"
  end
end
|
|
|
|
|
|
|
|
# Converts a token-level diff into inline markup pieces: common tokens pass
# through untouched, deleted/added tokens are tagged via
# add_class_or_wrap_in_tags.
#
# @param diff [Array<Array(String, Symbol)>] [token, op] pairs
# @return [Array<String>] markup fragments, in diff order
def generate_inline_html(diff)
  diff.each_with_object([]) do |(token, op), pieces|
    case op
    when :common
      pieces << token
    when :delete
      pieces << add_class_or_wrap_in_tags(token, "del")
    when :add
      pieces << add_class_or_wrap_in_tags(token, "ins")
    end
  end
end
|
|
|
|
|
|
|
|
# Converts a token-level diff into two parallel streams of markup pieces:
# common tokens go to both sides, deletions only to the left ("deleted")
# side and insertions only to the right ("inserted") side.
#
# @param diff [Array<Array(String, Symbol)>] [token, op] pairs
# @return [Array(Array<String>, Array<String>)] [deleted, inserted]
def generate_side_by_side_html(diff)
  deleted = []
  inserted = []
  diff.each do |(token, op)|
    case op
    when :common
      deleted << token
      inserted << token
    when :delete
      deleted << add_class_or_wrap_in_tags(token, "del")
    when :add
      inserted << add_class_or_wrap_in_tags(token, "ins")
    end
  end
  [deleted, inserted]
end
|
|
|
|
|
2013-12-16 12:11:46 -05:00
|
|
|
# Converts a token-level diff of raw markdown into two parallel streams:
# common tokens go to both sides, deletions are wrapped in <del> on the
# left side and insertions in <ins> on the right side.
#
# @param diff [Array<Array(String, Symbol)>] [token, op] pairs
# @return [Array(Array<String>, Array<String>)] [deleted, inserted]
def generate_side_by_side_markdown(diff)
  deleted = []
  inserted = []
  diff.each do |(token, op)|
    case op
    when :common
      deleted << token
      inserted << token
    when :delete
      deleted << "<del>#{token}</del>"
    when :add
      inserted << "<ins>#{token}</ins>"
    end
  end
  [deleted, inserted]
end
|
|
|
|
|
|
|
|
# SAX-based tokenizer that flattens an HTML fragment into a stream of
# opening-tag, closing-tag, and word tokens suitable for fine-grained
# diffing by ONPDiff.
class HtmlTokenizer < Nokogiri::XML::SAX::Document
  attr_accessor :tokens

  def initialize
    @tokens = []
  end

  # Parses +html+ and returns the flat token list.
  #
  # @param html [String] an HTML fragment
  # @return [Array<String>] serialized tag and escaped text tokens
  def self.tokenize(html)
    me = new
    parser = Nokogiri::HTML::SAX::Parser.new(me)
    # Wrap the fragment so the SAX parser sees a well-formed document;
    # the wrapper tags themselves are filtered out below.
    parser.parse("<html><body>#{html}</body></html>")
    me.tokens
  end

  # Wrapper tags added by .tokenize — not part of the original fragment.
  # Frozen so the constant array cannot be mutated at runtime.
  USELESS_TAGS = %w[html body].freeze

  def start_element(name, attributes = [])
    return if USELESS_TAGS.include?(name)
    # Re-serialize the opening tag, HTML-escaping attribute values.
    attrs = attributes.map { |a| " #{a[0]}=\"#{CGI.escapeHTML(a[1])}\"" }.join
    @tokens << "<#{name}#{attrs}>"
  end

  # Void elements: they have no closing tag in serialized HTML.
  AUTOCLOSING_TAGS = %w[area base br col embed hr img input meta].freeze

  def end_element(name)
    return if USELESS_TAGS.include?(name) || AUTOCLOSING_TAGS.include?(name)
    @tokens << "</#{name}>"
  end

  def characters(string)
    # Split text into single non-word characters and words (keeping
    # trailing spaces/tabs attached), escaping each token for HTML output.
    @tokens.concat string.scan(/\W|\w+[ \t]*/).map { |x| CGI.escapeHTML(x) }
  end
end
|
|
|
|
end
|