DEV: Reorganize files and add an entry point for each module
Parent: a73931c151
Commit: 1afa274b99
lib/modules/nsfw/entry_point.rb
@@ -5,7 +5,7 @@ module DiscourseAI
     class EntryPoint
       def inject_into(plugin)
         require_relative "evaluation.rb"
-        require_relative "jobs/regular/evaluate_content.rb"
+        require_relative "../../../app/jobs/regular/modules/nsfw/evaluate_content.rb"
 
         plugin.add_model_callback(Upload, :after_create) do
           Jobs.enqueue(:evaluate_content, upload_id: self.id)
lib/modules/sentiment/entry_point.rb (new file)
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+module DiscourseAI
+  module Sentiment
+    class EntryPoint
+      def inject_into(plugin)
+        require_relative "event_handler.rb"
+        require_relative "post_classifier.rb"
+        require_relative "../../../app/jobs/regular/modules/sentiment/sentiment_classify_post.rb"
+
+        plugin.on(:post_created) do |post|
+          DiscourseAI::Sentiment::EventHandler.handle_post_async(post)
+        end
+
+        plugin.on(:post_edited) do |post|
+          DiscourseAI::Sentiment::EventHandler.handle_post_async(post)
+        end
+      end
+    end
+  end
+end
lib/modules/toxicity/entry_point.rb (new file)
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+module DiscourseAI
+  module Toxicity
+    class EntryPoint
+      def inject_into(plugin)
+        require_relative "event_handler.rb"
+        require_relative "classifier.rb"
+        require_relative "post_classifier.rb"
+        require_relative "chat_message_classifier.rb"
+
+        jobs_base_path = "../../../app/jobs/regular/modules/toxicity"
+
+        require_relative "#{jobs_base_path}/toxicity_classify_post.rb"
+        require_relative "#{jobs_base_path}/toxicity_classify_chat_message.rb"
+
+        plugin.on(:post_created) do |post|
+          DiscourseAI::Toxicity::EventHandler.handle_post_async(post)
+        end
+
+        plugin.on(:post_edited) do |post|
+          DiscourseAI::Toxicity::EventHandler.handle_post_async(post)
+        end
+
+        plugin.on(:chat_message_created) do |chat_message|
+          DiscourseAI::Toxicity::EventHandler.handle_chat_async(chat_message)
+        end
+
+        plugin.on(:chat_message_edited) do |chat_message|
+          DiscourseAI::Toxicity::EventHandler.handle_chat_async(chat_message)
+        end
+      end
+    end
+  end
+end
plugin.rb (36 changed lines)
@@ -15,32 +15,16 @@ after_initialize do
   end
 
   require_relative "lib/shared/inference_manager.rb"
-  require_relative "lib/modules/toxicity/event_handler.rb"
-  require_relative "lib/modules/toxicity/classifier.rb"
-  require_relative "lib/modules/toxicity/post_classifier.rb"
-  require_relative "lib/modules/toxicity/chat_message_classifier.rb"
-  require_relative "app/jobs/regular/modules/toxicity/toxicity_classify_post.rb"
-  require_relative "app/jobs/regular/modules/toxicity/toxicity_classify_chat_message.rb"
-
-  require_relative "lib/modules/sentiment/event_handler.rb"
-  require_relative "lib/modules/sentiment/post_classifier.rb"
-  require_relative "app/jobs/regular/modules/sentiment/sentiment_classify_post.rb"
-
-  on(:post_created) do |post|
-    DiscourseAI::Toxicity::EventHandler.handle_post_async(post)
-    DiscourseAI::Sentiment::EventHandler.handle_post_async(post)
-  end
-  on(:post_edited) do |post|
-    DiscourseAI::Toxicity::EventHandler.handle_post_async(post)
-    DiscourseAI::Sentiment::EventHandler.handle_post_async(post)
-  end
-  on(:chat_message_created) do |chat_message|
-    DiscourseAI::Toxicity::EventHandler.handle_chat_async(chat_message)
-  end
-  on(:chat_message_edited) do |chat_message|
-    DiscourseAI::Toxicity::EventHandler.handle_chat_async(chat_message)
-  end
 
   require_relative "lib/modules/nsfw/entry_point.rb"
-  DiscourseAI::NSFW::EntryPoint.new.inject_into(self)
+  require_relative "lib/modules/toxicity/entry_point.rb"
+  require_relative "lib/modules/sentiment/entry_point.rb"
+
+  modules = [
+    DiscourseAI::NSFW::EntryPoint,
+    DiscourseAI::Toxicity::EntryPoint,
+    DiscourseAI::Sentiment::EntryPoint,
+  ]
+
+  modules.each { |a_module| a_module.new.inject_into(self) }
 end
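Taken together, the change gives every module the same shape: an EntryPoint class whose inject_into(plugin) loads the module's own files and registers its event handlers, while plugin.rb only collects the entry points and calls them in turn. As a rough sketch of how a further module would slot into this layout (the Summarization name, its files, and its handler are illustrative assumptions, not part of this commit):

# lib/modules/summarization/entry_point.rb -- hypothetical example
# frozen_string_literal: true
module DiscourseAI
  module Summarization
    class EntryPoint
      def inject_into(plugin)
        # Load this module's own library code and background jobs.
        require_relative "event_handler.rb"

        # Subscribe to the core events this module cares about.
        plugin.on(:post_created) do |post|
          DiscourseAI::Summarization::EventHandler.handle_post_async(post)
        end
      end
    end
  end
end

# plugin.rb would then only grow by one element in the modules array:
#   DiscourseAI::Summarization::EntryPoint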
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 require "rails_helper"
-require_relative "../../../../../support/nsfw_inference_stubs"
+require_relative "../../../../support/nsfw_inference_stubs"
 
 describe Jobs::EvaluateContent do
   fab!(:image) { Fabricate(:s3_image_upload) }
@@ -8,7 +8,7 @@ describe DiscourseAI::NSFW::Evaluation do
 
   fab!(:image) { Fabricate(:s3_image_upload) }
 
-  let(:available_models) { DiscourseAI::NSFW::Evaluation::AVAILABLE_MODELS }
+  let(:available_models) { SiteSetting.ai_nsfw_models.split("|") }
 
   describe "perform" do
     context "when we determine content is NSFW" do
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 class NSFWInferenceStubs
   class << self
     def endpoint