# frozen_string_literal: true

module ::DiscourseAi
  module Toxicity
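    # Queues background toxicity-classification jobs for posts and chat messages.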
    class ScanQueue
      class << self
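        # Queue a background job to classify a post's toxicity.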
        def enqueue_post(post)
          return if bypass?(post)

          Jobs.enqueue(:toxicity_classify_post, post_id: post.id)
        end
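
        # Queue a background job to classify a chat message's toxicity.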
        def enqueue_chat_message(chat_message)
          return if bypass?(chat_message)

          Jobs.enqueue(:toxicity_classify_chat_message, chat_message_id: chat_message.id)
        end
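
        # Skip classification when toxicity detection is disabled site-wide or
        # the content's author belongs to a bypass group.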
        def bypass?(content)
          !SiteSetting.ai_toxicity_enabled || group_bypass?(content.user)
        end
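
        # True when the user is in any of the groups configured to bypass scanning.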
        def group_bypass?(user)
          user.groups.pluck(:id).intersection(SiteSetting.ai_toxicity_groups_bypass_map).present?
        end
      end
    end
  end
end