# frozen_string_literal: true

module ::Jobs
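  # Background job that runs toxicity classification against a single chat message.
  # Enqueueing sketch (an assumption based on Discourse's usual job-naming
  # convention; the enqueue site is not part of this file):
  #   Jobs.enqueue(:toxicity_classify_chat_message, chat_message_id: message.id)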
  class ToxicityClassifyChatMessage < ::Jobs::Base
    def execute(args)
      # Do nothing when the toxicity module is disabled for this site.
      return unless SiteSetting.ai_toxicity_enabled

      # A chat message id is required; bail out if the job was enqueued without one.
      return if (chat_message_id = args[:chat_message_id]).blank?

      # The message may have been deleted or emptied since the job was enqueued.
      chat_message = ::Chat::Message.find_by(id: chat_message_id)
      return if chat_message&.message.blank?

      # Run the toxicity classification over the message content.
      DiscourseAi::ChatMessageClassificator.new(
        DiscourseAi::Toxicity::ToxicityClassification.new,
      ).classify!(chat_message)
    end
  end
end