2018-02-19 22:41:00 -05:00
|
|
|
# frozen_string_literal: true
|
2013-07-22 19:07:59 -04:00
|
|
|
|
2016-12-21 21:13:14 -05:00
|
|
|
class SearchIndexer
|
2023-02-19 19:53:35 -05:00
|
|
|
# Minimum post_search_data version whose raw_data is usable for blurbs.
MIN_POST_BLURB_INDEX_VERSION = 4

# Per-table index versions; stored in each *_search_data row
# (see the "version" param in update_index).
POST_INDEX_VERSION = 5

TOPIC_INDEX_VERSION = 4

CATEGORY_INDEX_VERSION = 3

USER_INDEX_VERSION = 3

TAG_INDEX_VERSION = 3

# version to apply when issuing a background reindex
REINDEX_VERSION = 0

# Splits a tsvector entry into its quoted lexeme (group 1, including the
# trailing ':') and its comma-separated position list (group 3, where each
# position may carry an A-D weight suffix), e.g. "'word':1A,3".
TS_VECTOR_PARSE_REGEX = /('([^']*|'')*'\:)(([0-9]+[A-D]?,?)+)/
|
2016-12-21 21:13:14 -05:00
|
|
|
|
|
|
|
# Turns off all search indexing for this process until .enable is called.
def self.disable
  @disabled = true
end
|
|
|
|
|
|
|
|
# Re-enables search indexing for this process (counterpart of .disable).
def self.enable
  @disabled = false
end
|
2013-02-05 14:16:51 -05:00
|
|
|
|
2024-02-20 12:24:30 -05:00
|
|
|
# Runs the given block with indexing enabled, then restores whatever the
# enabled/disabled state was beforehand — even if the block raises.
def self.with_indexing
  previous_state = @disabled
  enable
  yield
ensure
  @disabled = previous_state
end
|
|
|
|
|
2020-08-14 11:10:08 -04:00
|
|
|
# Builds and upserts the search index row for a single record.
#
# table     - base table name ("post", "topic", "user", "category", "tag");
#             used to derive the <table>_search_data model, the <table>_id
#             foreign key, and the <TABLE>_INDEX_VERSION constant.
# id        - primary key of the record being indexed.
# a_weight..d_weight - raw text for tsvector weight classes A through D.
#
# Yields the params hash (when a block is given) just before the upsert so
# callers can add extra columns — see update_posts_index.
# Errors are re-raised in the test env, otherwise logged via
# Discourse.warn_exception so indexing never breaks a save.
def self.update_index(table:, id:, a_weight: nil, b_weight: nil, c_weight: nil, d_weight: nil)
  raw_data = { a: a_weight, b: b_weight, c: c_weight, d: d_weight }

  # The version used in excerpts
  search_data = raw_data.transform_values { |data| Search.prepare_data(data || "", :index) }

  # The version used to build the index
  # (each whitespace-delimited token is truncated to the configured max length)
  indexed_data =
    search_data.transform_values do |data|
      data.gsub(/\S+/) { |word| word[0...SiteSetting.search_max_indexed_word_length] }
    end

  table_name = "#{table}_search_data"
  foreign_key = "#{table}_id"

  # for user login and name use "simple" lowercase stemmer
  stemmer = table == "user" ? "simple" : Search.ts_config

  # Let Postgres produce the weighted tsvector; :a-:d are bound from
  # indexed_data by DB.query_single below.
  ranked_index = <<~SQL
    setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:a,''))")}, 'A') ||
    setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:b,''))")}, 'B') ||
    setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:c,''))")}, 'C') ||
    setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:d,''))")}, 'D')
  SQL

  tsvector = DB.query_single("SELECT #{ranked_index}", indexed_data)[0]
  additional_lexemes = []

  # we also want to index parts of a domain name
  # that way stemmed single word searches will match
  additional_words = []

  # For each dotted lexeme (e.g. "foo.example.com") add every suffix
  # ("example.com", "com") as an extra lexeme at the same positions, and
  # remember each stripped leading segment for stemming below.
  tsvector
    .scan(/'(([a-zA-Z0-9]+\.)+[a-zA-Z0-9]+)'\:([\w+,]+)/)
    .reduce(additional_lexemes) do |array, (lexeme, _, positions)|
      count = 0

      # Skip purely numeric dotted terms (version numbers, IPs).
      if lexeme !~ /\A(\d+\.)?(\d+\.)*(\*|\d+)\z/
        loop do
          count += 1
          break if count >= 10 # Safeguard here to prevent infinite loop when a term has many dots
          term, _, remaining = lexeme.partition(".")
          break if remaining.blank?

          additional_words << [term, positions]

          array << "'#{remaining}':#{positions}"
          lexeme = remaining
        end
      end

      array
    end

  # Run the collected leading segments through the stemmer so e.g. single
  # stemmed words out of a hostname also match; remap the positions Postgres
  # assigns (1..n over the joined words) back to the original positions.
  extra_domain_word_terms =
    if additional_words.length > 0
      DB
        .query_single(
          "SELECT to_tsvector(?, ?)",
          stemmer,
          additional_words.map { |term, _| term }.join(" "),
        )
        .first
        .scan(TS_VECTOR_PARSE_REGEX)
        .map do |term, _, indexes|
          new_indexes =
            indexes
              .split(",")
              .map do |index|
                # index is 1-based into additional_words; fall back to the
                # raw index when it is out of range.
                existing_positions = additional_words[index.to_i - 1]
                if existing_positions
                  existing_positions[1]
                else
                  index
                end
              end
              .join(",")
          "#{term}#{new_indexes}"
        end
        .join(" ")
    end

  tsvector = "#{tsvector} #{additional_lexemes.join(" ")} #{extra_domain_word_terms}"

  # Cap how many positions a lexeme may keep per weight class to stop a
  # single repeated term from bloating the index.
  if (max_dupes = SiteSetting.max_duplicate_search_index_terms) > 0
    reduced = []
    tsvector
      .scan(TS_VECTOR_PARSE_REGEX)
      .each do |term, _, indexes|
        family_counts = Hash.new(0)
        new_index_array = []

        indexes
          .split(",")
          .each do |index|
            family = nil
            family = index[-1] if index[-1].match?(/[A-D]/)
            # title dupes can completely dominate the index
            # so we limit them to 1
            if (family_counts[family] += 1) <= (family == "A" ? 1 : max_dupes)
              new_index_array << index
            end
          end
        reduced << "#{term.strip}#{new_index_array.join(",")}"
      end
    tsvector = reduced.join(" ")
  end

  # raw_data column: for posts, the cleaned body text (media URLs replaced);
  # for everything else, the non-empty weight strings joined together.
  indexed_data =
    if table.to_s == "post"
      clean_post_raw_data!(search_data[:d])
    else
      search_data.values.select { |d| d.length > 0 }.join(" ")
    end

  params = {
    "raw_data" => indexed_data,
    "#{foreign_key}" => id,
    "locale" => SiteSetting.default_locale,
    "version" => const_get("#{table.upcase}_INDEX_VERSION"),
    "search_data" => tsvector,
  }

  yield params if block_given?
  table_name.camelize.constantize.upsert(params)
rescue => e
  if Rails.env.test?
    raise
  else
    # TODO is there any way we can safely avoid this?
    # best way is probably pushing search indexer into a dedicated process so it no longer happens on save
    # instead in the post processor
    Discourse.warn_exception(
      e,
      message: "Unexpected error while indexing #{table} for search",
      env: {
        id: id,
      },
    )
  end
end
|
|
|
|
|
2014-08-08 01:50:26 -04:00
|
|
|
# Indexes a topic: title as weight A, scrubbed (truncated) first-post body
# as weight B.
def self.update_topics_index(topic_id, title, cooked)
  # NOTE: a bit inconsistent — here the body is weight B, while in the post
  # index the body is weight D.
  scrubbed_body = HtmlScrubber.scrub(cooked)[0...Topic::MAX_SIMILAR_BODY_LENGTH]
  update_index(table: "topic", id: topic_id, a_weight: title, b_weight: scrubbed_body)
end
|
|
|
|
|
2022-05-25 01:08:36 -04:00
|
|
|
# Indexes a post: topic title (A), category name (B), topic tags (C) and the
# scrubbed post body (D). Also stamps the private_message flag on the row.
def self.update_posts_index(
  post_id:,
  topic_title:,
  category_name:,
  topic_tags:,
  cooked:,
  private_message:
)
  # The tsvector resulted from parsing a string can be double the size of
  # the original string. Since there is no way to estimate the length of
  # the expected tsvector, we limit the input to ~50% of the maximum
  # length of a tsvector (1_048_576 bytes).
  scrubbed_body = HtmlScrubber.scrub(cooked)[0..600_000]

  update_index(
    table: "post",
    id: post_id,
    a_weight: topic_title,
    b_weight: category_name,
    c_weight: topic_tags,
    d_weight: scrubbed_body,
  ) { |params| params["private_message"] = private_message }
end
|
|
|
|
|
2021-04-27 01:52:45 -04:00
|
|
|
# Indexes a user: username (A), full name (B), searchable custom fields (C).
def self.update_users_index(user_id, username, name, custom_fields)
  update_index(
    table: "user",
    id: user_id,
    a_weight: username,
    b_weight: name,
    c_weight: custom_fields,
  )
end
|
2013-02-07 10:45:24 -05:00
|
|
|
|
2013-02-05 14:16:51 -05:00
|
|
|
# Indexes a category by name (weight A).
def self.update_categories_index(category_id, name)
  update_index(
    table: "category",
    id: category_id,
    a_weight: name,
  )
end
|
|
|
|
|
2017-08-25 11:52:18 -04:00
|
|
|
# Indexes a tag by its lowercased name (weight A).
def self.update_tags_index(tag_id, name)
  update_index(
    table: "tag",
    id: tag_id,
    a_weight: name.downcase,
  )
end
|
|
|
|
|
2020-07-16 23:12:31 -04:00
|
|
|
# Marks every post in the given category for background reindexing by
# resetting its search-data version. No-op while indexing is disabled.
def self.queue_category_posts_reindex(category_id)
  return if @disabled

  sql = <<~SQL
    UPDATE post_search_data
    SET version = :version
    FROM posts
    INNER JOIN topics ON posts.topic_id = topics.id
    INNER JOIN categories ON topics.category_id = categories.id
    WHERE post_search_data.post_id = posts.id
    AND categories.id = :category_id
  SQL

  DB.exec(sql, category_id: category_id, version: REINDEX_VERSION)
end
|
|
|
|
|
2021-04-27 01:52:45 -04:00
|
|
|
# Marks the given users for background reindexing by resetting their
# search-data version. No-op while indexing is disabled.
def self.queue_users_reindex(user_ids)
  return if @disabled

  sql = <<~SQL
    UPDATE user_search_data
    SET version = :version
    WHERE user_search_data.user_id IN (:user_ids)
  SQL

  DB.exec(sql, user_ids: user_ids, version: REINDEX_VERSION)
end
|
|
|
|
|
2018-02-19 22:41:00 -05:00
|
|
|
# Marks every post of the given topic for background reindexing by resetting
# its search-data version. No-op while indexing is disabled.
def self.queue_post_reindex(topic_id)
  return if @disabled

  sql = <<~SQL
    UPDATE post_search_data
    SET version = :version
    FROM posts
    WHERE post_search_data.post_id = posts.id
    AND posts.topic_id = :topic_id
  SQL

  DB.exec(sql, topic_id: topic_id, version: REINDEX_VERSION)
end
|
|
|
|
|
2017-08-16 07:38:34 -04:00
|
|
|
# Entry point called after model saves: dispatches on the class of obj
# (Post, Topic, User, Category, Tag) and refreshes the relevant search
# index rows. Unless force: true, each branch only runs when the fields it
# indexes actually changed (saved_change_to_* dirty tracking).
def self.index(obj, force: false)
  return if @disabled

  category_name = nil
  tag_names = nil
  topic = nil

  # Resolve the topic context for posts and topics.
  if Topic === obj
    topic = obj
  elsif Post === obj
    topic = obj.topic
  end

  category_name = topic.category&.name if topic

  if topic
    tags = topic.tags.select(:id, :name).to_a

    if tags.present?
      # Index synonym tags (those targeting one of the topic's tags) too.
      tag_names =
        (tags.map(&:name) + Tag.where(target_tag_id: tags.map(&:id)).pluck(:name)).join(" ")
    end
  end

  # Post: reindex when the cooked body or topic changed (or when forced).
  if Post === obj && obj.raw.present? &&
       (force || obj.saved_change_to_cooked? || obj.saved_change_to_topic_id?)
    if topic
      SearchIndexer.update_posts_index(
        post_id: obj.id,
        topic_title: topic.title,
        category_name: category_name,
        topic_tags: tag_names,
        cooked: obj.cooked,
        private_message: topic.private_message?,
      )

      # The first post's body also feeds the topic index.
      SearchIndexer.update_topics_index(topic.id, topic.title, obj.cooked) if obj.is_first_post?
    end
  end

  # User: reindex on username/name change.
  if User === obj && (obj.saved_change_to_username? || obj.saved_change_to_name? || force)
    SearchIndexer.update_users_index(
      obj.id,
      obj.username_lower || "",
      obj.name ? obj.name.downcase : "",
      obj.user_custom_fields.searchable.map(&:value).join(" "),
    )
  end

  # Topic: a title change must refresh both the topic row and its first post
  # (the post index stores the topic title at weight A).
  if Topic === obj && (obj.saved_change_to_title? || force)
    if obj.posts
      if post = obj.posts.find_by(post_number: 1)
        SearchIndexer.update_posts_index(
          post_id: post.id,
          topic_title: obj.title,
          category_name: category_name,
          topic_tags: tag_names,
          cooked: post.cooked,
          private_message: obj.private_message?,
        )

        SearchIndexer.update_topics_index(obj.id, obj.title, post.cooked)
      end
    end
  end

  # Category: a rename invalidates every post indexed under the old name,
  # so queue a background reindex in addition to updating the category row.
  if Category === obj && (obj.saved_change_to_name? || force)
    SearchIndexer.queue_category_posts_reindex(obj.id)
    SearchIndexer.update_categories_index(obj.id, obj.name)
  end

  # Tag: reindex on rename.
  if Tag === obj && (obj.saved_change_to_name? || force)
    SearchIndexer.update_tags_index(obj.id, obj.name)
  end
end
|
|
|
|
|
2020-08-06 00:25:03 -04:00
|
|
|
# Mutates raw_data in place, replacing video/audio URLs (recognized by file
# extension) with localized placeholder words so media links are searchable
# as "video"/"audio" rather than as raw URLs. Returns raw_data.
def self.clean_post_raw_data!(raw_data)
  found_urls = Set.new
  raw_data.scan(Discourse::Utils::URI_REGEXP) { found_urls << $& }

  found_urls.each do |url|
    extension =
      begin
        File.extname(URI(url).path || "")
      rescue URI::InvalidURIError
        # Unparseable URL — leave it in the text untouched.
        next
      end

    if Oneboxer::VIDEO_REGEX.match?(extension)
      raw_data.gsub!(url, I18n.t("search.video"))
    elsif Oneboxer::AUDIO_REGEX.match?(extension)
      raw_data.gsub!(url, I18n.t("search.audio"))
    end
  end

  raw_data
end

private_class_method :clean_post_raw_data!
|
|
|
|
|
2013-02-05 14:16:51 -05:00
|
|
|
# SAX document that reduces cooked post HTML to the plain text worth
# indexing: character data plus a small whitelist of attribute values
# (see ATTRIBUTES). Use via HtmlScrubber.scrub(html).
class HtmlScrubber < Nokogiri::XML::SAX::Document
  # Accumulated plain text extracted during parsing.
  attr_reader :scrubbed

  def initialize
    @scrubbed = +""
  end

  # Returns the indexable text for the given cooked HTML: strips lightbox
  # wrapper chrome, emoji alt text and redundant hrefs from the DOM, then
  # re-parses with this SAX scrubber to collect text and whitelisted
  # attribute values. Returns a squished (single-spaced) string.
  def self.scrub(html)
    return +"" if html.blank?

    begin
      document = Nokogiri.HTML5("<div>#{html}</div>", nil, Encoding::UTF_8.to_s)
    rescue ArgumentError
      return +""
    end

    # Inside lightbox wrappers, keep only the <a>/<img> (stripped of
    # whitelisted attributes) and drop everything else (captions, meta).
    nodes = document.css("div.#{CookedPostProcessor::LIGHTBOX_WRAPPER_CSS_CLASS}")

    if nodes.present?
      nodes.each do |node|
        node.traverse do |child_node|
          next if child_node == node

          if %w[a img].exclude?(child_node.name)
            child_node.remove
          elsif child_node.name == "a"
            ATTRIBUTES.each { |attribute| child_node.remove_attribute(attribute) }
          end
        end
      end
    end

    # Emoji alt text (e.g. ":smile:") should not be indexed.
    document.css("img.emoji").each { |node| node.remove_attribute("alt") }

    # Drop hrefs that add no information: bare links whose text is the URL
    # itself, mention links, and in-page anchor links.
    document
      .css("a[href]")
      .each do |node|
        if node["href"] == node.text || MENTION_CLASSES.include?(node["class"])
          node.remove_attribute("href")
        end

        # NOTE(review): if the branch above already removed the href,
        # node["href"] is nil here and .starts_with? would raise — relies on
        # anchor links never matching the first condition; worth confirming.
        if node["class"] == "anchor" && node["href"].starts_with?("#")
          node.remove_attribute("href")
        end
      end

    html_scrubber = new
    Nokogiri::HTML::SAX::Parser.new(html_scrubber).parse(document.to_html)
    html_scrubber.scrubbed.squish
  end

  # Link classes treated as user/group mentions.
  MENTION_CLASSES = %w[mention mention-group]
  # Attribute values whose text is worth indexing.
  ATTRIBUTES = %w[alt title href data-video-title]

  # SAX callback: harvest whitelisted attribute values, skipping local hrefs
  # (internal links carry no extra searchable text).
  def start_element(_name, attributes = [])
    attributes = Hash[*attributes.flatten]

    ATTRIBUTES.each do |attribute_name|
      if attributes[attribute_name].present? &&
           !(attribute_name == "href" && UrlHelper.is_local(attributes[attribute_name]))
        characters(attributes[attribute_name])
      end
    end
  end

  # SAX callback: append text content, space-padded so adjacent runs don't
  # fuse into one token (squish collapses the extra spaces later).
  def characters(str)
    scrubbed << " #{str} "
  end
end
|
|
|
|
end
|