diff --git a/lib/modules/ai_bot/jobs/regular/create_ai_reply.rb b/app/jobs/regular/create_ai_reply.rb similarity index 100% rename from lib/modules/ai_bot/jobs/regular/create_ai_reply.rb rename to app/jobs/regular/create_ai_reply.rb diff --git a/lib/modules/nsfw/jobs/regular/evaluate_post_uploads.rb b/app/jobs/regular/evaluate_post_uploads.rb similarity index 80% rename from lib/modules/nsfw/jobs/regular/evaluate_post_uploads.rb rename to app/jobs/regular/evaluate_post_uploads.rb index 80dc75d5..6327d06a 100644 --- a/lib/modules/nsfw/jobs/regular/evaluate_post_uploads.rb +++ b/app/jobs/regular/evaluate_post_uploads.rb @@ -11,7 +11,7 @@ module Jobs return if post.uploads.none? { |u| FileHelper.is_supported_image?(u.url) } - DiscourseAi::PostClassificator.new(DiscourseAi::NSFW::NSFWClassification.new).classify!(post) + DiscourseAi::PostClassificator.new(DiscourseAi::Nsfw::Classification.new).classify!(post) end end end diff --git a/lib/modules/ai_helper/jobs/regular/generate_chat_thread_title.rb b/app/jobs/regular/generate_chat_thread_title.rb similarity index 100% rename from lib/modules/ai_helper/jobs/regular/generate_chat_thread_title.rb rename to app/jobs/regular/generate_chat_thread_title.rb diff --git a/lib/modules/embeddings/jobs/regular/generate_embeddings.rb b/app/jobs/regular/generate_embeddings.rb similarity index 100% rename from lib/modules/embeddings/jobs/regular/generate_embeddings.rb rename to app/jobs/regular/generate_embeddings.rb diff --git a/lib/modules/sentiment/jobs/regular/post_sentiment_analysis.rb b/app/jobs/regular/post_sentiment_analysis.rb similarity index 100% rename from lib/modules/sentiment/jobs/regular/post_sentiment_analysis.rb rename to app/jobs/regular/post_sentiment_analysis.rb diff --git a/lib/modules/toxicity/jobs/regular/toxicity_classify_chat_message.rb b/app/jobs/regular/toxicity_classify_chat_message.rb similarity index 100% rename from lib/modules/toxicity/jobs/regular/toxicity_classify_chat_message.rb rename to app/jobs/regular/toxicity_classify_chat_message.rb diff --git a/lib/modules/toxicity/jobs/regular/toxicity_classify_post.rb b/app/jobs/regular/toxicity_classify_post.rb similarity index 100% rename from lib/modules/toxicity/jobs/regular/toxicity_classify_post.rb rename to app/jobs/regular/toxicity_classify_post.rb diff --git a/lib/modules/ai_bot/jobs/regular/update_ai_bot_pm_title.rb b/app/jobs/regular/update_ai_bot_pm_title.rb similarity index 100% rename from lib/modules/ai_bot/jobs/regular/update_ai_bot_pm_title.rb rename to app/jobs/regular/update_ai_bot_pm_title.rb diff --git a/lib/modules/embeddings/jobs/scheduled/embeddings_backfill.rb b/app/jobs/scheduled/embeddings_backfill.rb similarity index 100% rename from lib/modules/embeddings/jobs/scheduled/embeddings_backfill.rb rename to app/jobs/scheduled/embeddings_backfill.rb diff --git a/config/locales/server.en.yml b/config/locales/server.en.yml index f7357ff4..8216e1e5 100644 --- a/config/locales/server.en.yml +++ b/config/locales/server.en.yml @@ -53,6 +53,8 @@ en: ai_helper_allowed_in_pm: "Enable the composer's AI helper in PMs." ai_helper_model: "Model to use for the AI helper." ai_helper_custom_prompts_allowed_groups: "Users on these groups will see the custom prompt option in the AI helper." + ai_helper_automatic_chat_thread_title_delay: "Delay in minutes before the AI helper automatically sets the chat thread title." + ai_helper_automatic_chat_thread_title: "Automatically set the chat thread titles based on thread contents." 
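(The two ai_helper_automatic_chat_thread_title* settings added above are consumed by the AI helper entry point shown later in this diff; for reference, that hook in lib/ai_helper/entry_point.rb wires them up roughly as below — the delay setting simply pushes the title job out by that many minutes.)

# Excerpted/paraphrased from the lib/ai_helper/entry_point.rb hunk further down in this diff.
plugin.on(:chat_thread_created) do |thread|
  next unless SiteSetting.composer_ai_helper_enabled
  next unless SiteSetting.ai_helper_automatic_chat_thread_title

  # Enqueue the chat-thread-title job after the configured delay (in minutes).
  ::Jobs.enqueue_in(
    SiteSetting.ai_helper_automatic_chat_thread_title_delay.minutes,
    :generate_chat_thread_title,
    thread_id: thread.id,
  )
end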
ai_embeddings_enabled: "Enable the embeddings module." ai_embeddings_discourse_service_api_endpoint: "URL where the API is running for the embeddings module" diff --git a/lib/discourse_automation/llm_triage.rb b/discourse_automation/llm_triage.rb similarity index 100% rename from lib/discourse_automation/llm_triage.rb rename to discourse_automation/llm_triage.rb diff --git a/lib/modules/ai_bot/anthropic_bot.rb b/lib/ai_bot/anthropic_bot.rb similarity index 100% rename from lib/modules/ai_bot/anthropic_bot.rb rename to lib/ai_bot/anthropic_bot.rb diff --git a/lib/modules/ai_bot/bot.rb b/lib/ai_bot/bot.rb similarity index 100% rename from lib/modules/ai_bot/bot.rb rename to lib/ai_bot/bot.rb diff --git a/lib/modules/ai_bot/commands/categories_command.rb b/lib/ai_bot/commands/categories_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/categories_command.rb rename to lib/ai_bot/commands/categories_command.rb diff --git a/lib/modules/ai_bot/commands/command.rb b/lib/ai_bot/commands/command.rb similarity index 100% rename from lib/modules/ai_bot/commands/command.rb rename to lib/ai_bot/commands/command.rb diff --git a/lib/modules/ai_bot/commands/dall_e_command.rb b/lib/ai_bot/commands/dall_e_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/dall_e_command.rb rename to lib/ai_bot/commands/dall_e_command.rb diff --git a/lib/modules/ai_bot/commands/db_schema_command.rb b/lib/ai_bot/commands/db_schema_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/db_schema_command.rb rename to lib/ai_bot/commands/db_schema_command.rb diff --git a/lib/modules/ai_bot/commands/google_command.rb b/lib/ai_bot/commands/google_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/google_command.rb rename to lib/ai_bot/commands/google_command.rb diff --git a/lib/modules/ai_bot/commands/image_command.rb b/lib/ai_bot/commands/image_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/image_command.rb rename to lib/ai_bot/commands/image_command.rb diff --git a/lib/ai_bot/commands/parameter.rb b/lib/ai_bot/commands/parameter.rb new file mode 100644 index 00000000..b4a22a55 --- /dev/null +++ b/lib/ai_bot/commands/parameter.rb @@ -0,0 +1,18 @@ +# frozen_string_literal: true +module DiscourseAi + module AiBot + module Commands + class Parameter + attr_reader :item_type, :name, :description, :type, :enum, :required + def initialize(name:, description:, type:, enum: nil, required: false, item_type: nil) + @name = name + @description = description + @type = type + @enum = enum + @required = required + @item_type = item_type + end + end + end + end +end diff --git a/lib/modules/ai_bot/commands/read_command.rb b/lib/ai_bot/commands/read_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/read_command.rb rename to lib/ai_bot/commands/read_command.rb diff --git a/lib/modules/ai_bot/commands/search_command.rb b/lib/ai_bot/commands/search_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/search_command.rb rename to lib/ai_bot/commands/search_command.rb diff --git a/lib/modules/ai_bot/commands/search_settings_command.rb b/lib/ai_bot/commands/search_settings_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/search_settings_command.rb rename to lib/ai_bot/commands/search_settings_command.rb diff --git a/lib/modules/ai_bot/commands/setting_context_command.rb b/lib/ai_bot/commands/setting_context_command.rb similarity index 100% rename from 
lib/modules/ai_bot/commands/setting_context_command.rb rename to lib/ai_bot/commands/setting_context_command.rb diff --git a/lib/modules/ai_bot/commands/summarize_command.rb b/lib/ai_bot/commands/summarize_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/summarize_command.rb rename to lib/ai_bot/commands/summarize_command.rb diff --git a/lib/modules/ai_bot/commands/tags_command.rb b/lib/ai_bot/commands/tags_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/tags_command.rb rename to lib/ai_bot/commands/tags_command.rb diff --git a/lib/modules/ai_bot/commands/time_command.rb b/lib/ai_bot/commands/time_command.rb similarity index 100% rename from lib/modules/ai_bot/commands/time_command.rb rename to lib/ai_bot/commands/time_command.rb diff --git a/lib/modules/ai_bot/entry_point.rb b/lib/ai_bot/entry_point.rb similarity index 72% rename from lib/modules/ai_bot/entry_point.rb rename to lib/ai_bot/entry_point.rb index 6798ad56..76bfedf1 100644 --- a/lib/modules/ai_bot/entry_point.rb +++ b/lib/ai_bot/entry_point.rb @@ -27,36 +27,6 @@ module DiscourseAi end end - def load_files - require_relative "jobs/regular/create_ai_reply" - require_relative "jobs/regular/update_ai_bot_pm_title" - require_relative "bot" - require_relative "anthropic_bot" - require_relative "open_ai_bot" - require_relative "commands/command" - require_relative "commands/search_command" - require_relative "commands/categories_command" - require_relative "commands/tags_command" - require_relative "commands/time_command" - require_relative "commands/summarize_command" - require_relative "commands/image_command" - require_relative "commands/google_command" - require_relative "commands/read_command" - require_relative "commands/setting_context_command" - require_relative "commands/search_settings_command" - require_relative "commands/db_schema_command" - require_relative "commands/dall_e_command" - require_relative "personas/persona" - require_relative "personas/artist" - require_relative "personas/general" - require_relative "personas/sql_helper" - require_relative "personas/settings_explorer" - require_relative "personas/researcher" - require_relative "personas/creative" - require_relative "personas/dall_e_3" - require_relative "site_settings_extension" - end - def inject_into(plugin) plugin.on(:site_setting_changed) do |name, _old_value, _new_value| if name == :ai_bot_enabled_chat_bots || name == :ai_bot_enabled @@ -76,7 +46,7 @@ module DiscourseAi scope.user.in_any_groups?(SiteSetting.ai_bot_allowed_groups_map) end, ) do - Personas + DiscourseAi::AiBot::Personas .all(user: scope.user) .map do |persona| { id: persona.id, name: persona.name, description: persona.description } @@ -135,8 +105,8 @@ module DiscourseAi post.topic.custom_fields[REQUIRE_TITLE_UPDATE] = true post.topic.save_custom_fields end - Jobs.enqueue(:create_ai_reply, post_id: post.id, bot_user_id: bot_id) - Jobs.enqueue_in( + ::Jobs.enqueue(:create_ai_reply, post_id: post.id, bot_user_id: bot_id) + ::Jobs.enqueue_in( 5.minutes, :update_ai_bot_pm_title, post_id: post.id, diff --git a/lib/modules/ai_bot/open_ai_bot.rb b/lib/ai_bot/open_ai_bot.rb similarity index 100% rename from lib/modules/ai_bot/open_ai_bot.rb rename to lib/ai_bot/open_ai_bot.rb diff --git a/lib/ai_bot/personas.rb b/lib/ai_bot/personas.rb new file mode 100644 index 00000000..ec105986 --- /dev/null +++ b/lib/ai_bot/personas.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module DiscourseAi + module AiBot + module Personas + def 
self.system_personas + @system_personas ||= { + Personas::General => -1, + Personas::SqlHelper => -2, + Personas::Artist => -3, + Personas::SettingsExplorer => -4, + Personas::Researcher => -5, + Personas::Creative => -6, + Personas::DallE3 => -7, + } + end + + def self.system_personas_by_id + @system_personas_by_id ||= system_personas.invert + end + + def self.all(user:) + # this needs to be dynamic cause site settings may change + all_available_commands = Persona.all_available_commands + + AiPersona.all_personas.filter do |persona| + next false if !user.in_any_groups?(persona.allowed_group_ids) + + if persona.system + instance = persona.new + ( + instance.required_commands == [] || + (instance.required_commands - all_available_commands).empty? + ) + else + true + end + end + end + + def self.find_by(id: nil, name: nil, user:) + all(user: user).find { |persona| persona.id == id || persona.name == name } + end + end + end +end diff --git a/lib/modules/ai_bot/personas/artist.rb b/lib/ai_bot/personas/artist.rb similarity index 100% rename from lib/modules/ai_bot/personas/artist.rb rename to lib/ai_bot/personas/artist.rb diff --git a/lib/modules/ai_bot/personas/creative.rb b/lib/ai_bot/personas/creative.rb similarity index 100% rename from lib/modules/ai_bot/personas/creative.rb rename to lib/ai_bot/personas/creative.rb diff --git a/lib/modules/ai_bot/personas/dall_e_3.rb b/lib/ai_bot/personas/dall_e_3.rb similarity index 100% rename from lib/modules/ai_bot/personas/dall_e_3.rb rename to lib/ai_bot/personas/dall_e_3.rb diff --git a/lib/modules/ai_bot/personas/general.rb b/lib/ai_bot/personas/general.rb similarity index 100% rename from lib/modules/ai_bot/personas/general.rb rename to lib/ai_bot/personas/general.rb diff --git a/lib/modules/ai_bot/personas/persona.rb b/lib/ai_bot/personas/persona.rb similarity index 76% rename from lib/modules/ai_bot/personas/persona.rb rename to lib/ai_bot/personas/persona.rb index 0ba7d48a..199216f8 100644 --- a/lib/modules/ai_bot/personas/persona.rb +++ b/lib/ai_bot/personas/persona.rb @@ -3,46 +3,6 @@ module DiscourseAi module AiBot module Personas - def self.system_personas - @system_personas ||= { - Personas::General => -1, - Personas::SqlHelper => -2, - Personas::Artist => -3, - Personas::SettingsExplorer => -4, - Personas::Researcher => -5, - Personas::Creative => -6, - Personas::DallE3 => -7, - } - end - - def self.system_personas_by_id - @system_personas_by_id ||= system_personas.invert - end - - def self.all(user:) - personas = - AiPersona.all_personas.filter { |persona| user.in_any_groups?(persona.allowed_group_ids) } - - # this needs to be dynamic cause site settings may change - all_available_commands = Persona.all_available_commands - - personas.filter do |persona| - if persona.system - instance = persona.new - ( - instance.required_commands == [] || - (instance.required_commands - all_available_commands).empty? 
- ) - else - true - end - end - end - - def self.find_by(id: nil, name: nil, user:) - all(user: user).find { |persona| persona.id == id || persona.name == name } - end - class Persona def self.name I18n.t("discourse_ai.ai_bot.personas.#{to_s.demodulize.underscore}.name") diff --git a/lib/modules/ai_bot/personas/researcher.rb b/lib/ai_bot/personas/researcher.rb similarity index 100% rename from lib/modules/ai_bot/personas/researcher.rb rename to lib/ai_bot/personas/researcher.rb diff --git a/lib/modules/ai_bot/personas/settings_explorer.rb b/lib/ai_bot/personas/settings_explorer.rb similarity index 100% rename from lib/modules/ai_bot/personas/settings_explorer.rb rename to lib/ai_bot/personas/settings_explorer.rb diff --git a/lib/modules/ai_bot/personas/sql_helper.rb b/lib/ai_bot/personas/sql_helper.rb similarity index 100% rename from lib/modules/ai_bot/personas/sql_helper.rb rename to lib/ai_bot/personas/sql_helper.rb diff --git a/lib/modules/ai_bot/site_settings_extension.rb b/lib/ai_bot/site_settings_extension.rb similarity index 100% rename from lib/modules/ai_bot/site_settings_extension.rb rename to lib/ai_bot/site_settings_extension.rb diff --git a/lib/modules/ai_helper/assistant.rb b/lib/ai_helper/assistant.rb similarity index 97% rename from lib/modules/ai_helper/assistant.rb rename to lib/ai_helper/assistant.rb index 04f8fb76..8444e225 100644 --- a/lib/modules/ai_helper/assistant.rb +++ b/lib/ai_helper/assistant.rb @@ -25,7 +25,7 @@ module DiscourseAi end def generate_and_send_prompt(completion_prompt, input, user) - llm = DiscourseAi::Completions::LLM.proxy(SiteSetting.ai_helper_model) + llm = DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model) generic_prompt = completion_prompt.messages_with_input(input) diff --git a/lib/modules/ai_helper/chat_thread_titler.rb b/lib/ai_helper/chat_thread_titler.rb similarity index 100% rename from lib/modules/ai_helper/chat_thread_titler.rb rename to lib/ai_helper/chat_thread_titler.rb diff --git a/lib/modules/ai_helper/entry_point.rb b/lib/ai_helper/entry_point.rb similarity index 69% rename from lib/modules/ai_helper/entry_point.rb rename to lib/ai_helper/entry_point.rb index ebb7c8bb..3ab9daa9 100644 --- a/lib/modules/ai_helper/entry_point.rb +++ b/lib/ai_helper/entry_point.rb @@ -2,15 +2,6 @@ module DiscourseAi module AiHelper class EntryPoint - def load_files - require_relative "chat_thread_titler" - require_relative "jobs/regular/generate_chat_thread_title" - require_relative "assistant" - require_relative "painter" - require_relative "semantic_categorizer" - require_relative "topic_helper" - end - def inject_into(plugin) plugin.register_seedfu_fixtures( Rails.root.join("plugins", "discourse-ai", "db", "fixtures", "ai_helper"), @@ -22,7 +13,7 @@ module DiscourseAi plugin.on(:chat_thread_created) do |thread| next unless SiteSetting.composer_ai_helper_enabled next unless SiteSetting.ai_helper_automatic_chat_thread_title - Jobs.enqueue_in( + ::Jobs.enqueue_in( SiteSetting.ai_helper_automatic_chat_thread_title_delay.minutes, :generate_chat_thread_title, thread_id: thread.id, diff --git a/lib/modules/ai_helper/painter.rb b/lib/ai_helper/painter.rb similarity index 94% rename from lib/modules/ai_helper/painter.rb rename to lib/ai_helper/painter.rb index 3ad40eb9..d08dc2fd 100644 --- a/lib/modules/ai_helper/painter.rb +++ b/lib/ai_helper/painter.rb @@ -35,7 +35,7 @@ module DiscourseAi You'll find the post between XML tags. 
TEXT - DiscourseAi::Completions::LLM.proxy(SiteSetting.ai_helper_model).completion!(prompt, user) + DiscourseAi::Completions::Llm.proxy(SiteSetting.ai_helper_model).completion!(prompt, user) end end end diff --git a/lib/modules/ai_helper/semantic_categorizer.rb b/lib/ai_helper/semantic_categorizer.rb similarity index 100% rename from lib/modules/ai_helper/semantic_categorizer.rb rename to lib/ai_helper/semantic_categorizer.rb diff --git a/lib/modules/ai_helper/topic_helper.rb b/lib/ai_helper/topic_helper.rb similarity index 100% rename from lib/modules/ai_helper/topic_helper.rb rename to lib/ai_helper/topic_helper.rb diff --git a/lib/shared/chat_message_classificator.rb b/lib/chat_message_classificator.rb similarity index 100% rename from lib/shared/chat_message_classificator.rb rename to lib/chat_message_classificator.rb diff --git a/lib/shared/classificator.rb b/lib/classificator.rb similarity index 100% rename from lib/shared/classificator.rb rename to lib/classificator.rb diff --git a/lib/completions/dialects/chat_gpt.rb b/lib/completions/dialects/chat_gpt.rb index 1f4166be..82595225 100644 --- a/lib/completions/dialects/chat_gpt.rb +++ b/lib/completions/dialects/chat_gpt.rb @@ -3,7 +3,7 @@ module DiscourseAi module Completions module Dialects - class ChatGPT + class ChatGpt def self.can_translate?(model_name) %w[gpt-3.5-turbo gpt-4 gpt-3.5-turbo-16k gpt-4-32k].include?(model_name) end diff --git a/lib/completions/endpoints/base.rb b/lib/completions/endpoints/base.rb index 624be145..56882de9 100644 --- a/lib/completions/endpoints/base.rb +++ b/lib/completions/endpoints/base.rb @@ -13,9 +13,9 @@ module DiscourseAi [ DiscourseAi::Completions::Endpoints::AwsBedrock, DiscourseAi::Completions::Endpoints::Anthropic, - DiscourseAi::Completions::Endpoints::OpenAI, - DiscourseAi::Completions::Endpoints::Huggingface, - ].detect(-> { raise DiscourseAi::Completions::LLM::UNKNOWN_MODEL }) do |ek| + DiscourseAi::Completions::Endpoints::OpenAi, + DiscourseAi::Completions::Endpoints::HuggingFace, + ].detect(-> { raise DiscourseAi::Completions::Llm::UNKNOWN_MODEL }) do |ek| ek.can_contact?(model_name) end end diff --git a/lib/completions/endpoints/hugging_face.rb b/lib/completions/endpoints/hugging_face.rb index 271a3394..bd418380 100644 --- a/lib/completions/endpoints/hugging_face.rb +++ b/lib/completions/endpoints/hugging_face.rb @@ -3,7 +3,7 @@ module DiscourseAi module Completions module Endpoints - class Huggingface < Base + class HuggingFace < Base def self.can_contact?(model_name) %w[StableBeluga2 Upstage-Llama-2-*-instruct-v2 Llama2-*-chat-hf].include?(model_name) end diff --git a/lib/completions/endpoints/open_ai.rb b/lib/completions/endpoints/open_ai.rb index 3388c00c..65b01314 100644 --- a/lib/completions/endpoints/open_ai.rb +++ b/lib/completions/endpoints/open_ai.rb @@ -3,7 +3,7 @@ module DiscourseAi module Completions module Endpoints - class OpenAI < Base + class OpenAi < Base def self.can_contact?(model_name) %w[gpt-3.5-turbo gpt-4 gpt-3.5-turbo-16k gpt-4-32k].include?(model_name) end diff --git a/lib/completions/entry_point.rb b/lib/completions/entry_point.rb deleted file mode 100644 index fa3d2ba6..00000000 --- a/lib/completions/entry_point.rb +++ /dev/null @@ -1,26 +0,0 @@ -# frozen_string_literal: true - -module DiscourseAi - module Completions - class EntryPoint - def load_files - require_relative "dialects/chat_gpt" - require_relative "dialects/llama2_classic" - require_relative "dialects/orca_style" - require_relative "dialects/claude" - - require_relative 
"endpoints/canned_response" - require_relative "endpoints/base" - require_relative "endpoints/anthropic" - require_relative "endpoints/aws_bedrock" - require_relative "endpoints/open_ai" - require_relative "endpoints/hugging_face" - - require_relative "llm" - end - - def inject_into(_) - end - end - end -end diff --git a/lib/completions/llm.rb b/lib/completions/llm.rb index 7210aad2..e6afd9da 100644 --- a/lib/completions/llm.rb +++ b/lib/completions/llm.rb @@ -14,7 +14,7 @@ # module DiscourseAi module Completions - class LLM + class Llm UNKNOWN_MODEL = Class.new(StandardError) def self.with_prepared_responses(responses) @@ -27,7 +27,7 @@ module DiscourseAi dialects = [ DiscourseAi::Completions::Dialects::Claude, DiscourseAi::Completions::Dialects::Llama2Classic, - DiscourseAi::Completions::Dialects::ChatGPT, + DiscourseAi::Completions::Dialects::ChatGpt, DiscourseAi::Completions::Dialects::OrcaStyle, ] diff --git a/lib/shared/database/connection.rb b/lib/database/connection.rb similarity index 100% rename from lib/shared/database/connection.rb rename to lib/database/connection.rb diff --git a/lib/modules/embeddings/entry_point.rb b/lib/embeddings/entry_point.rb similarity index 72% rename from lib/modules/embeddings/entry_point.rb rename to lib/embeddings/entry_point.rb index bb054bdb..b006dd5c 100644 --- a/lib/modules/embeddings/entry_point.rb +++ b/lib/embeddings/entry_point.rb @@ -3,21 +3,6 @@ module DiscourseAi module Embeddings class EntryPoint - def load_files - require_relative "vector_representations/base" - require_relative "vector_representations/all_mpnet_base_v2" - require_relative "vector_representations/text_embedding_ada_002" - require_relative "vector_representations/multilingual_e5_large" - require_relative "vector_representations/bge_large_en" - require_relative "strategies/truncation" - require_relative "jobs/regular/generate_embeddings" - require_relative "jobs/scheduled/embeddings_backfill" - require_relative "semantic_related" - require_relative "semantic_topic_query" - - require_relative "semantic_search" - end - def inject_into(plugin) # Include random topics in the suggested list *only* if there are no related topics. 
plugin.register_modifier( diff --git a/lib/modules/embeddings/semantic_related.rb b/lib/embeddings/semantic_related.rb similarity index 100% rename from lib/modules/embeddings/semantic_related.rb rename to lib/embeddings/semantic_related.rb diff --git a/lib/modules/embeddings/semantic_search.rb b/lib/embeddings/semantic_search.rb similarity index 98% rename from lib/modules/embeddings/semantic_search.rb rename to lib/embeddings/semantic_search.rb index 87ee12fb..330cda22 100644 --- a/lib/modules/embeddings/semantic_search.rb +++ b/lib/embeddings/semantic_search.rb @@ -103,14 +103,14 @@ module DiscourseAi TEXT input: <<~TEXT, Using this description, write a forum post about the subject inside the XML tags: - + #{search_term} TEXT post_insts: "Put the forum post between tags.", } llm_response = - DiscourseAi::Completions::LLM.proxy( + DiscourseAi::Completions::Llm.proxy( SiteSetting.ai_embeddings_semantic_search_hyde_model, ).completion!(prompt, @guardian.user) diff --git a/lib/modules/embeddings/semantic_topic_query.rb b/lib/embeddings/semantic_topic_query.rb similarity index 100% rename from lib/modules/embeddings/semantic_topic_query.rb rename to lib/embeddings/semantic_topic_query.rb diff --git a/lib/modules/embeddings/strategies/truncation.rb b/lib/embeddings/strategies/truncation.rb similarity index 100% rename from lib/modules/embeddings/strategies/truncation.rb rename to lib/embeddings/strategies/truncation.rb diff --git a/lib/modules/embeddings/vector_representations/all_mpnet_base_v2.rb b/lib/embeddings/vector_representations/all_mpnet_base_v2.rb similarity index 100% rename from lib/modules/embeddings/vector_representations/all_mpnet_base_v2.rb rename to lib/embeddings/vector_representations/all_mpnet_base_v2.rb diff --git a/lib/modules/embeddings/vector_representations/base.rb b/lib/embeddings/vector_representations/base.rb similarity index 100% rename from lib/modules/embeddings/vector_representations/base.rb rename to lib/embeddings/vector_representations/base.rb diff --git a/lib/modules/embeddings/vector_representations/bge_large_en.rb b/lib/embeddings/vector_representations/bge_large_en.rb similarity index 100% rename from lib/modules/embeddings/vector_representations/bge_large_en.rb rename to lib/embeddings/vector_representations/bge_large_en.rb diff --git a/lib/modules/embeddings/vector_representations/multilingual_e5_large.rb b/lib/embeddings/vector_representations/multilingual_e5_large.rb similarity index 100% rename from lib/modules/embeddings/vector_representations/multilingual_e5_large.rb rename to lib/embeddings/vector_representations/multilingual_e5_large.rb diff --git a/lib/modules/embeddings/vector_representations/text_embedding_ada_002.rb b/lib/embeddings/vector_representations/text_embedding_ada_002.rb similarity index 100% rename from lib/modules/embeddings/vector_representations/text_embedding_ada_002.rb rename to lib/embeddings/vector_representations/text_embedding_ada_002.rb diff --git a/lib/discourse_ai/engine.rb b/lib/engine.rb similarity index 100% rename from lib/discourse_ai/engine.rb rename to lib/engine.rb diff --git a/lib/shared/inference/amazon_bedrock_inference.rb b/lib/inference/amazon_bedrock_inference.rb similarity index 100% rename from lib/shared/inference/amazon_bedrock_inference.rb rename to lib/inference/amazon_bedrock_inference.rb diff --git a/lib/shared/inference/anthropic_completions.rb b/lib/inference/anthropic_completions.rb similarity index 100% rename from lib/shared/inference/anthropic_completions.rb rename to 
lib/inference/anthropic_completions.rb diff --git a/lib/shared/inference/cloudflare_workers_ai.rb b/lib/inference/cloudflare_workers_ai.rb similarity index 100% rename from lib/shared/inference/cloudflare_workers_ai.rb rename to lib/inference/cloudflare_workers_ai.rb diff --git a/lib/shared/inference/discourse_classifier.rb b/lib/inference/discourse_classifier.rb similarity index 100% rename from lib/shared/inference/discourse_classifier.rb rename to lib/inference/discourse_classifier.rb diff --git a/lib/shared/inference/discourse_reranker.rb b/lib/inference/discourse_reranker.rb similarity index 100% rename from lib/shared/inference/discourse_reranker.rb rename to lib/inference/discourse_reranker.rb diff --git a/lib/shared/inference/function.rb b/lib/inference/function.rb similarity index 100% rename from lib/shared/inference/function.rb rename to lib/inference/function.rb diff --git a/lib/shared/inference/function_list.rb b/lib/inference/function_list.rb similarity index 100% rename from lib/shared/inference/function_list.rb rename to lib/inference/function_list.rb diff --git a/lib/shared/inference/hugging_face_text_embeddings.rb b/lib/inference/hugging_face_text_embeddings.rb similarity index 100% rename from lib/shared/inference/hugging_face_text_embeddings.rb rename to lib/inference/hugging_face_text_embeddings.rb diff --git a/lib/shared/inference/hugging_face_text_generation.rb b/lib/inference/hugging_face_text_generation.rb similarity index 100% rename from lib/shared/inference/hugging_face_text_generation.rb rename to lib/inference/hugging_face_text_generation.rb diff --git a/lib/shared/inference/openai_completions.rb b/lib/inference/open_ai_completions.rb similarity index 100% rename from lib/shared/inference/openai_completions.rb rename to lib/inference/open_ai_completions.rb diff --git a/lib/shared/inference/openai_embeddings.rb b/lib/inference/open_ai_embeddings.rb similarity index 100% rename from lib/shared/inference/openai_embeddings.rb rename to lib/inference/open_ai_embeddings.rb diff --git a/lib/shared/inference/openai_image_generator.rb b/lib/inference/open_ai_image_generator.rb similarity index 100% rename from lib/shared/inference/openai_image_generator.rb rename to lib/inference/open_ai_image_generator.rb diff --git a/lib/shared/inference/stability_generator.rb b/lib/inference/stability_generator.rb similarity index 100% rename from lib/shared/inference/stability_generator.rb rename to lib/inference/stability_generator.rb diff --git a/lib/modules/nsfw/nsfw_classification.rb b/lib/nsfw/classification.rb similarity index 98% rename from lib/modules/nsfw/nsfw_classification.rb rename to lib/nsfw/classification.rb index c8187bb9..2c2623a2 100644 --- a/lib/modules/nsfw/nsfw_classification.rb +++ b/lib/nsfw/classification.rb @@ -1,8 +1,8 @@ # frozen_string_literal: true module DiscourseAi - module NSFW - class NSFWClassification + module Nsfw + class Classification def type :nsfw end diff --git a/lib/modules/nsfw/entry_point.rb b/lib/nsfw/entry_point.rb similarity index 65% rename from lib/modules/nsfw/entry_point.rb rename to lib/nsfw/entry_point.rb index 864c653f..bf6ef3ff 100644 --- a/lib/modules/nsfw/entry_point.rb +++ b/lib/nsfw/entry_point.rb @@ -1,18 +1,13 @@ # frozen_string_literal: true module DiscourseAi - module NSFW + module Nsfw class EntryPoint - def load_files - require_relative "nsfw_classification" - require_relative "jobs/regular/evaluate_post_uploads" - end - def inject_into(plugin) nsfw_detection_cb = Proc.new do |post| if 
SiteSetting.ai_nsfw_detection_enabled && - DiscourseAi::NSFW::NSFWClassification.new.can_classify?(post) + DiscourseAi::Nsfw::Classification.new.can_classify?(post) Jobs.enqueue(:evaluate_post_uploads, post_id: post.id) end end diff --git a/lib/shared/post_classificator.rb b/lib/post_classificator.rb similarity index 100% rename from lib/shared/post_classificator.rb rename to lib/post_classificator.rb diff --git a/lib/modules/sentiment/entry_point.rb b/lib/sentiment/entry_point.rb similarity index 95% rename from lib/modules/sentiment/entry_point.rb rename to lib/sentiment/entry_point.rb index 0e4a9003..2ce22cbc 100644 --- a/lib/modules/sentiment/entry_point.rb +++ b/lib/sentiment/entry_point.rb @@ -3,11 +3,6 @@ module DiscourseAi module Sentiment class EntryPoint - def load_files - require_relative "sentiment_classification" - require_relative "jobs/regular/post_sentiment_analysis" - end - def inject_into(plugin) sentiment_analysis_cb = Proc.new do |post| @@ -32,11 +27,11 @@ module DiscourseAi grouped_sentiments = DB.query( <<~SQL, - SELECT + SELECT DATE_TRUNC('day', p.created_at)::DATE AS posted_at, #{sentiment_count_sql.call("positive")}, -#{sentiment_count_sql.call("negative")} - FROM + FROM classification_results AS cr INNER JOIN posts p ON p.id = cr.target_id AND cr.target_type = 'Post' INNER JOIN topics t ON t.id = p.topic_id @@ -84,7 +79,7 @@ module DiscourseAi grouped_emotions = DB.query( <<~SQL, - SELECT + SELECT u.trust_level AS trust_level, #{emotion_count_clause.call("sadness")}, #{emotion_count_clause.call("surprise")}, diff --git a/lib/modules/sentiment/sentiment_classification.rb b/lib/sentiment/sentiment_classification.rb similarity index 100% rename from lib/modules/sentiment/sentiment_classification.rb rename to lib/sentiment/sentiment_classification.rb diff --git a/lib/shared/tokenizer/tokenizer.rb b/lib/shared/tokenizer/tokenizer.rb deleted file mode 100644 index 965c433b..00000000 --- a/lib/shared/tokenizer/tokenizer.rb +++ /dev/null @@ -1,103 +0,0 @@ -# frozen_string_literal: true - -module DiscourseAi - module Tokenizer - class BasicTokenizer - class << self - def tokenizer - raise NotImplementedError - end - - def tokenize(text) - tokenizer.encode(text).tokens - end - - def size(text) - tokenize(text).size - end - - def truncate(text, max_length) - # Fast track the common case where the text is already short enough. 
- return text if text.size < max_length - - tokenizer.decode(tokenizer.encode(text).ids.take(max_length)) - end - - def can_expand_tokens?(text, addition, max_length) - return true if text.size + addition.size < max_length - - tokenizer.encode(text).ids.length + tokenizer.encode(addition).ids.length < max_length - end - end - end - - class BertTokenizer < BasicTokenizer - def self.tokenizer - @@tokenizer ||= - Tokenizers.from_file("./plugins/discourse-ai/tokenizers/bert-base-uncased.json") - end - end - - class AnthropicTokenizer < BasicTokenizer - def self.tokenizer - @@tokenizer ||= - Tokenizers.from_file("./plugins/discourse-ai/tokenizers/claude-v1-tokenization.json") - end - end - - class AllMpnetBaseV2Tokenizer < BasicTokenizer - def self.tokenizer - @@tokenizer ||= - Tokenizers.from_file("./plugins/discourse-ai/tokenizers/all-mpnet-base-v2.json") - end - end - - class Llama2Tokenizer < BasicTokenizer - def self.tokenizer - @@tokenizer ||= - Tokenizers.from_file("./plugins/discourse-ai/tokenizers/llama-2-70b-chat-hf.json") - end - end - - class MultilingualE5LargeTokenizer < BasicTokenizer - def self.tokenizer - @@tokenizer ||= - Tokenizers.from_file("./plugins/discourse-ai/tokenizers/multilingual-e5-large.json") - end - end - - class BgeLargeEnTokenizer < BasicTokenizer - def self.tokenizer - @@tokenizer ||= Tokenizers.from_file("./plugins/discourse-ai/tokenizers/bge-large-en.json") - end - end - - class OpenAiTokenizer < BasicTokenizer - class << self - def tokenizer - @@tokenizer ||= Tiktoken.get_encoding("cl100k_base") - end - - def tokenize(text) - tokenizer.encode(text) - end - - def truncate(text, max_length) - # Fast track the common case where the text is already short enough. - return text if text.size < max_length - - tokenizer.decode(tokenize(text).take(max_length)) - rescue Tiktoken::UnicodeError - max_length = max_length - 1 - retry - end - - def can_expand_tokens?(text, addition, max_length) - return true if text.size + addition.size < max_length - - tokenizer.encode(text).length + tokenizer.encode(addition).length < max_length - end - end - end - end -end diff --git a/lib/modules/summarization/entry_point.rb b/lib/summarization/entry_point.rb similarity index 77% rename from lib/modules/summarization/entry_point.rb rename to lib/summarization/entry_point.rb index ad582637..66d359b3 100644 --- a/lib/modules/summarization/entry_point.rb +++ b/lib/summarization/entry_point.rb @@ -3,18 +3,6 @@ module DiscourseAi module Summarization class EntryPoint - def load_files - require_relative "models/base" - require_relative "models/anthropic" - require_relative "models/discourse" - require_relative "models/open_ai" - require_relative "models/llama2" - require_relative "models/llama2_fine_tuned_orca_style" - - require_relative "strategies/fold_content" - require_relative "strategies/truncate_content" - end - def inject_into(plugin) foldable_models = [ Models::OpenAi.new("gpt-4", max_tokens: 8192), diff --git a/lib/modules/summarization/models/anthropic.rb b/lib/summarization/models/anthropic.rb similarity index 100% rename from lib/modules/summarization/models/anthropic.rb rename to lib/summarization/models/anthropic.rb diff --git a/lib/modules/summarization/models/base.rb b/lib/summarization/models/base.rb similarity index 100% rename from lib/modules/summarization/models/base.rb rename to lib/summarization/models/base.rb diff --git a/lib/modules/summarization/models/discourse.rb b/lib/summarization/models/discourse.rb similarity index 100% rename from 
lib/modules/summarization/models/discourse.rb rename to lib/summarization/models/discourse.rb diff --git a/lib/modules/summarization/models/llama2.rb b/lib/summarization/models/llama2.rb similarity index 100% rename from lib/modules/summarization/models/llama2.rb rename to lib/summarization/models/llama2.rb diff --git a/lib/modules/summarization/models/llama2_fine_tuned_orca_style.rb b/lib/summarization/models/llama2_fine_tuned_orca_style.rb similarity index 100% rename from lib/modules/summarization/models/llama2_fine_tuned_orca_style.rb rename to lib/summarization/models/llama2_fine_tuned_orca_style.rb diff --git a/lib/modules/summarization/models/open_ai.rb b/lib/summarization/models/open_ai.rb similarity index 100% rename from lib/modules/summarization/models/open_ai.rb rename to lib/summarization/models/open_ai.rb diff --git a/lib/modules/summarization/strategies/fold_content.rb b/lib/summarization/strategies/fold_content.rb similarity index 98% rename from lib/modules/summarization/strategies/fold_content.rb rename to lib/summarization/strategies/fold_content.rb index b10bd248..731e3931 100644 --- a/lib/modules/summarization/strategies/fold_content.rb +++ b/lib/summarization/strategies/fold_content.rb @@ -19,7 +19,7 @@ module DiscourseAi def summarize(content, user, &on_partial_blk) opts = content.except(:contents) - llm = DiscourseAi::Completions::LLM.proxy(completion_model.model) + llm = DiscourseAi::Completions::Llm.proxy(completion_model.model) chunks = split_into_chunks(llm.tokenizer, content[:contents]) diff --git a/lib/modules/summarization/strategies/truncate_content.rb b/lib/summarization/strategies/truncate_content.rb similarity index 100% rename from lib/modules/summarization/strategies/truncate_content.rb rename to lib/summarization/strategies/truncate_content.rb diff --git a/lib/tokenizer/all_mpnet_base_v2_tokenizer.rb b/lib/tokenizer/all_mpnet_base_v2_tokenizer.rb new file mode 100644 index 00000000..4ce5a8ea --- /dev/null +++ b/lib/tokenizer/all_mpnet_base_v2_tokenizer.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class AllMpnetBaseV2Tokenizer < BasicTokenizer + def self.tokenizer + @@tokenizer ||= + Tokenizers.from_file("./plugins/discourse-ai/tokenizers/all-mpnet-base-v2.json") + end + end + end +end diff --git a/lib/tokenizer/anthropic_tokenizer.rb b/lib/tokenizer/anthropic_tokenizer.rb new file mode 100644 index 00000000..2c304edf --- /dev/null +++ b/lib/tokenizer/anthropic_tokenizer.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class AnthropicTokenizer < BasicTokenizer + def self.tokenizer + @@tokenizer ||= + Tokenizers.from_file("./plugins/discourse-ai/tokenizers/claude-v1-tokenization.json") + end + end + end +end diff --git a/lib/tokenizer/basic_tokenizer.rb b/lib/tokenizer/basic_tokenizer.rb new file mode 100644 index 00000000..f4afd675 --- /dev/null +++ b/lib/tokenizer/basic_tokenizer.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class BasicTokenizer + class << self + def tokenizer + raise NotImplementedError + end + + def tokenize(text) + tokenizer.encode(text).tokens + end + + def size(text) + tokenize(text).size + end + + def truncate(text, max_length) + # Fast track the common case where the text is already short enough. 
+ return text if text.size < max_length + + tokenizer.decode(tokenizer.encode(text).ids.take(max_length)) + end + + def can_expand_tokens?(text, addition, max_length) + return true if text.size + addition.size < max_length + + tokenizer.encode(text).ids.length + tokenizer.encode(addition).ids.length < max_length + end + end + end + end +end diff --git a/lib/tokenizer/bert_tokenizer.rb b/lib/tokenizer/bert_tokenizer.rb new file mode 100644 index 00000000..671aaa9c --- /dev/null +++ b/lib/tokenizer/bert_tokenizer.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class BertTokenizer < BasicTokenizer + def self.tokenizer + @@tokenizer ||= + Tokenizers.from_file("./plugins/discourse-ai/tokenizers/bert-base-uncased.json") + end + end + end +end diff --git a/lib/tokenizer/bge_large_en_tokenizer.rb b/lib/tokenizer/bge_large_en_tokenizer.rb new file mode 100644 index 00000000..3130bda4 --- /dev/null +++ b/lib/tokenizer/bge_large_en_tokenizer.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class BgeLargeEnTokenizer < BasicTokenizer + def self.tokenizer + @@tokenizer ||= Tokenizers.from_file("./plugins/discourse-ai/tokenizers/bge-large-en.json") + end + end + end +end diff --git a/lib/tokenizer/llama2_tokenizer.rb b/lib/tokenizer/llama2_tokenizer.rb new file mode 100644 index 00000000..515d0063 --- /dev/null +++ b/lib/tokenizer/llama2_tokenizer.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class Llama2Tokenizer < BasicTokenizer + def self.tokenizer + @@tokenizer ||= + Tokenizers.from_file("./plugins/discourse-ai/tokenizers/llama-2-70b-chat-hf.json") + end + end + end +end diff --git a/lib/tokenizer/multilingual_e5_large_tokenizer.rb b/lib/tokenizer/multilingual_e5_large_tokenizer.rb new file mode 100644 index 00000000..81b18f99 --- /dev/null +++ b/lib/tokenizer/multilingual_e5_large_tokenizer.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class MultilingualE5LargeTokenizer < BasicTokenizer + def self.tokenizer + @@tokenizer ||= + Tokenizers.from_file("./plugins/discourse-ai/tokenizers/multilingual-e5-large.json") + end + end + end +end diff --git a/lib/tokenizer/open_ai_tokenizer.rb b/lib/tokenizer/open_ai_tokenizer.rb new file mode 100644 index 00000000..0a31ffce --- /dev/null +++ b/lib/tokenizer/open_ai_tokenizer.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +module DiscourseAi + module Tokenizer + class OpenAiTokenizer < BasicTokenizer + class << self + def tokenizer + @@tokenizer ||= Tiktoken.get_encoding("cl100k_base") + end + + def tokenize(text) + tokenizer.encode(text) + end + + def truncate(text, max_length) + # Fast track the common case where the text is already short enough. 
+ return text if text.size < max_length + + tokenizer.decode(tokenize(text).take(max_length)) + rescue Tiktoken::UnicodeError + max_length = max_length - 1 + retry + end + + def can_expand_tokens?(text, addition, max_length) + return true if text.size + addition.size < max_length + + tokenizer.encode(text).length + tokenizer.encode(addition).length < max_length + end + end + end + end +end diff --git a/lib/modules/toxicity/entry_point.rb b/lib/toxicity/entry_point.rb similarity index 70% rename from lib/modules/toxicity/entry_point.rb rename to lib/toxicity/entry_point.rb index 32a24779..a16a4e32 100644 --- a/lib/modules/toxicity/entry_point.rb +++ b/lib/toxicity/entry_point.rb @@ -3,14 +3,6 @@ module DiscourseAi module Toxicity class EntryPoint - def load_files - require_relative "scan_queue" - require_relative "toxicity_classification" - - require_relative "jobs/regular/toxicity_classify_post" - require_relative "jobs/regular/toxicity_classify_chat_message" - end - def inject_into(plugin) post_analysis_cb = Proc.new { |post| DiscourseAi::Toxicity::ScanQueue.enqueue_post(post) } diff --git a/lib/modules/toxicity/scan_queue.rb b/lib/toxicity/scan_queue.rb similarity index 100% rename from lib/modules/toxicity/scan_queue.rb rename to lib/toxicity/scan_queue.rb diff --git a/lib/modules/toxicity/toxicity_classification.rb b/lib/toxicity/toxicity_classification.rb similarity index 100% rename from lib/modules/toxicity/toxicity_classification.rb rename to lib/toxicity/toxicity_classification.rb diff --git a/plugin.rb b/plugin.rb index 4e415cd2..2dd299ee 100644 --- a/plugin.rb +++ b/plugin.rb @@ -29,57 +29,25 @@ module ::DiscourseAi PLUGIN_NAME = "discourse-ai" end -require_relative "lib/discourse_ai/engine" +Rails.autoloaders.main.push_dir(File.join(__dir__, "lib"), namespace: ::DiscourseAi) + +require_relative "lib/engine" after_initialize do - require_relative "lib/shared/inference/discourse_classifier" - require_relative "lib/shared/inference/discourse_reranker" - require_relative "lib/shared/inference/openai_completions" - require_relative "lib/shared/inference/openai_embeddings" - require_relative "lib/shared/inference/openai_image_generator" - require_relative "lib/shared/inference/anthropic_completions" - require_relative "lib/shared/inference/stability_generator" - require_relative "lib/shared/inference/hugging_face_text_generation" - require_relative "lib/shared/inference/amazon_bedrock_inference" - require_relative "lib/shared/inference/cloudflare_workers_ai" - require_relative "lib/shared/inference/hugging_face_text_embeddings" - require_relative "lib/shared/inference/function" - require_relative "lib/shared/inference/function_list" - - require_relative "lib/shared/classificator" - require_relative "lib/shared/post_classificator" - require_relative "lib/shared/chat_message_classificator" - - require_relative "lib/shared/tokenizer/tokenizer" - - require_relative "lib/shared/database/connection" - - require_relative "lib/completions/entry_point" - - require_relative "lib/modules/nsfw/entry_point" - require_relative "lib/modules/toxicity/entry_point" - require_relative "lib/modules/sentiment/entry_point" - require_relative "lib/modules/ai_helper/entry_point" - require_relative "lib/modules/embeddings/entry_point" - require_relative "lib/modules/summarization/entry_point" - require_relative "lib/modules/ai_bot/entry_point" - require_relative "lib/discourse_automation/llm_triage" + # do not autoload this cause we may have no namespace + require_relative "discourse_automation/llm_triage" 
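(The plugin.rb change above replaces the per-module load_files requires with a single Zeitwerk root directory namespaced under ::DiscourseAi. A minimal sketch of the mapping this sets up — paths taken from this diff; constant names follow Zeitwerk's default inflection, which is why acronym-style names such as LLM, NSFW, ChatGPT, OpenAI and Huggingface become Llm, Nsfw, ChatGpt, OpenAi and HuggingFace, and why the old multi-class tokenizer.rb is split into one file per class.)

# Registered once in plugin.rb:
Rails.autoloaders.main.push_dir(File.join(__dir__, "lib"), namespace: ::DiscourseAi)

# Zeitwerk then derives constants from file paths, so no explicit requires are needed:
#   lib/completions/llm.rb                 => DiscourseAi::Completions::Llm
#   lib/completions/endpoints/open_ai.rb   => DiscourseAi::Completions::Endpoints::OpenAi
#   lib/nsfw/classification.rb             => DiscourseAi::Nsfw::Classification
#   lib/ai_bot/personas.rb                 => DiscourseAi::AiBot::Personas
#   lib/tokenizer/open_ai_tokenizer.rb     => DiscourseAi::Tokenizer::OpenAiTokenizer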
add_admin_route "discourse_ai.title", "discourse-ai" [ - DiscourseAi::Completions::EntryPoint.new, DiscourseAi::Embeddings::EntryPoint.new, - DiscourseAi::NSFW::EntryPoint.new, + DiscourseAi::Nsfw::EntryPoint.new, DiscourseAi::Toxicity::EntryPoint.new, DiscourseAi::Sentiment::EntryPoint.new, DiscourseAi::AiHelper::EntryPoint.new, DiscourseAi::Summarization::EntryPoint.new, DiscourseAi::AiBot::EntryPoint.new, - ].each do |a_module| - a_module.load_files - a_module.inject_into(self) - end + ].each { |a_module| a_module.inject_into(self) } register_reviewable_type ReviewableAiChatMessage register_reviewable_type ReviewableAiPost diff --git a/spec/lib/completions/dialects/chat_gpt_spec.rb b/spec/lib/completions/dialects/chat_gpt_spec.rb index 599f1113..27b56b49 100644 --- a/spec/lib/completions/dialects/chat_gpt_spec.rb +++ b/spec/lib/completions/dialects/chat_gpt_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -RSpec.describe DiscourseAi::Completions::Dialects::ChatGPT do +RSpec.describe DiscourseAi::Completions::Dialects::ChatGpt do subject(:dialect) { described_class.new } let(:prompt) do diff --git a/spec/lib/completions/endpoints/aws_bedrock_spec.rb b/spec/lib/completions/endpoints/aws_bedrock_spec.rb index c0938709..5c0cb8cc 100644 --- a/spec/lib/completions/endpoints/aws_bedrock_spec.rb +++ b/spec/lib/completions/endpoints/aws_bedrock_spec.rb @@ -1,6 +1,8 @@ # frozen_string_literal: true require_relative "endpoint_examples" +require "aws-eventstream" +require "aws-sigv4" RSpec.describe DiscourseAi::Completions::Endpoints::AwsBedrock do subject(:model) { described_class.new(model_name, DiscourseAi::Tokenizer::AnthropicTokenizer) } diff --git a/spec/lib/completions/endpoints/hugging_face_spec.rb b/spec/lib/completions/endpoints/hugging_face_spec.rb index 0acd480f..cfe76e76 100644 --- a/spec/lib/completions/endpoints/hugging_face_spec.rb +++ b/spec/lib/completions/endpoints/hugging_face_spec.rb @@ -2,7 +2,7 @@ require_relative "endpoint_examples" -RSpec.describe DiscourseAi::Completions::Endpoints::Huggingface do +RSpec.describe DiscourseAi::Completions::Endpoints::HuggingFace do subject(:model) { described_class.new(model_name, DiscourseAi::Tokenizer::Llama2Tokenizer) } let(:model_name) { "Llama2-*-chat-hf" } diff --git a/spec/lib/completions/endpoints/open_ai_spec.rb b/spec/lib/completions/endpoints/open_ai_spec.rb index 63d33d78..fa22d461 100644 --- a/spec/lib/completions/endpoints/open_ai_spec.rb +++ b/spec/lib/completions/endpoints/open_ai_spec.rb @@ -2,7 +2,7 @@ require_relative "endpoint_examples" -RSpec.describe DiscourseAi::Completions::Endpoints::OpenAI do +RSpec.describe DiscourseAi::Completions::Endpoints::OpenAi do subject(:model) { described_class.new(model_name, DiscourseAi::Tokenizer::OpenAiTokenizer) } let(:model_name) { "gpt-3.5-turbo" } diff --git a/spec/lib/completions/llm_spec.rb b/spec/lib/completions/llm_spec.rb index 9c7148c0..66f53060 100644 --- a/spec/lib/completions/llm_spec.rb +++ b/spec/lib/completions/llm_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -RSpec.describe DiscourseAi::Completions::LLM do +RSpec.describe DiscourseAi::Completions::Llm do subject(:llm) do described_class.new( DiscourseAi::Completions::Dialects::OrcaStyle.new, @@ -16,7 +16,7 @@ RSpec.describe DiscourseAi::Completions::LLM do fake_model = "unknown_v2" expect { described_class.proxy(fake_model) }.to( - raise_error(DiscourseAi::Completions::LLM::UNKNOWN_MODEL), + raise_error(DiscourseAi::Completions::Llm::UNKNOWN_MODEL), ) end end diff --git 
a/spec/lib/modules/ai_bot/commands/search_command_spec.rb b/spec/lib/modules/ai_bot/commands/search_command_spec.rb index 215fd3b2..f4e6dbc6 100644 --- a/spec/lib/modules/ai_bot/commands/search_command_spec.rb +++ b/spec/lib/modules/ai_bot/commands/search_command_spec.rb @@ -67,7 +67,7 @@ RSpec.describe DiscourseAi::AiBot::Commands::SearchCommand do .returns([post1.topic_id]) results = - DiscourseAi::Completions::LLM.with_prepared_responses(["#{query}"]) do + DiscourseAi::Completions::Llm.with_prepared_responses(["#{query}"]) do search.process(search_query: "hello world, sam", status: "public") end diff --git a/spec/lib/modules/ai_helper/assistant_spec.rb b/spec/lib/modules/ai_helper/assistant_spec.rb index 913bbfec..8de19228 100644 --- a/spec/lib/modules/ai_helper/assistant_spec.rb +++ b/spec/lib/modules/ai_helper/assistant_spec.rb @@ -22,7 +22,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do it "Sends the prompt to the LLM and returns the response" do response = - DiscourseAi::Completions::LLM.with_prepared_responses([english_text]) do + DiscourseAi::Completions::Llm.with_prepared_responses([english_text]) do subject.generate_and_send_prompt(prompt, text_to_translate, user) end @@ -47,7 +47,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do ] response = - DiscourseAi::Completions::LLM.with_prepared_responses([titles]) do + DiscourseAi::Completions::Llm.with_prepared_responses([titles]) do subject.generate_and_send_prompt(prompt, english_text, user) end diff --git a/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb b/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb index b07f7940..f0c27f7a 100644 --- a/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb +++ b/spec/lib/modules/ai_helper/chat_thread_titler_spec.rb @@ -12,7 +12,7 @@ RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do "The solitary horseThe horse etched in goldA horse's infinite journeyA horse lost in timeA horse's last ride" expected_title = "The solitary horse" result = - DiscourseAi::Completions::LLM.with_prepared_responses([titles]) { titler.suggested_title } + DiscourseAi::Completions::Llm.with_prepared_responses([titles]) { titler.suggested_title } expect(result).to eq(expected_title) end diff --git a/spec/lib/modules/ai_helper/painter_spec.rb b/spec/lib/modules/ai_helper/painter_spec.rb index 977ecb46..2c627b35 100644 --- a/spec/lib/modules/ai_helper/painter_spec.rb +++ b/spec/lib/modules/ai_helper/painter_spec.rb @@ -34,7 +34,7 @@ RSpec.describe DiscourseAi::AiHelper::Painter do StableDiffusionStubs.new.stub_response(expected_image_prompt, artifacts) thumbnails = - DiscourseAi::Completions::LLM.with_prepared_responses([expected_image_prompt]) do + DiscourseAi::Completions::Llm.with_prepared_responses([expected_image_prompt]) do thumbnails = subject.commission_thumbnails(raw_content, user) end diff --git a/spec/lib/modules/embeddings/semantic_search_spec.rb b/spec/lib/modules/embeddings/semantic_search_spec.rb index b406dbaa..47c88c4f 100644 --- a/spec/lib/modules/embeddings/semantic_search_spec.rb +++ b/spec/lib/modules/embeddings/semantic_search_spec.rb @@ -31,7 +31,7 @@ RSpec.describe DiscourseAi::Embeddings::SemanticSearch do end def trigger_search(query) - DiscourseAi::Completions::LLM.with_prepared_responses(["#{hypothetical_post}"]) do + DiscourseAi::Completions::Llm.with_prepared_responses(["#{hypothetical_post}"]) do subject.search_for_topics(query) end end @@ -127,7 +127,7 @@ RSpec.describe DiscourseAi::Embeddings::SemanticSearch do context "while searching as anon" do it "returns 
an empty list" do posts = - DiscourseAi::Completions::LLM.with_prepared_responses( + DiscourseAi::Completions::Llm.with_prepared_responses( ["#{hypothetical_post}"], ) { described_class.new(Guardian.new(nil)).search_for_topics(query) } diff --git a/spec/lib/modules/nsfw/entry_point_spec.rb b/spec/lib/modules/nsfw/entry_point_spec.rb index 93142527..acc26326 100644 --- a/spec/lib/modules/nsfw/entry_point_spec.rb +++ b/spec/lib/modules/nsfw/entry_point_spec.rb @@ -2,7 +2,7 @@ require "rails_helper" -describe DiscourseAi::NSFW::EntryPoint do +describe DiscourseAi::Nsfw::EntryPoint do fab!(:user) { Fabricate(:user) } describe "registering event callbacks" do diff --git a/spec/lib/modules/nsfw/nsfw_classification_spec.rb b/spec/lib/modules/nsfw/nsfw_classification_spec.rb index 1d3b99e1..15de208e 100644 --- a/spec/lib/modules/nsfw/nsfw_classification_spec.rb +++ b/spec/lib/modules/nsfw/nsfw_classification_spec.rb @@ -3,7 +3,7 @@ require "rails_helper" require_relative "../../../support/nsfw_inference_stubs" -describe DiscourseAi::NSFW::NSFWClassification do +describe DiscourseAi::Nsfw::Classification do before { SiteSetting.ai_nsfw_inference_service_api_endpoint = "http://test.com" } let(:available_models) { SiteSetting.ai_nsfw_models.split("|") } diff --git a/spec/lib/modules/summarization/strategies/fold_content_spec.rb b/spec/lib/modules/summarization/strategies/fold_content_spec.rb index dfe35528..3598e883 100644 --- a/spec/lib/modules/summarization/strategies/fold_content_spec.rb +++ b/spec/lib/modules/summarization/strategies/fold_content_spec.rb @@ -25,7 +25,7 @@ RSpec.describe DiscourseAi::Summarization::Strategies::FoldContent do context "when the content to summarize fits in a single call" do it "does one call to summarize content" do result = - DiscourseAi::Completions::LLM.with_prepared_responses([single_summary]) do |spy| + DiscourseAi::Completions::Llm.with_prepared_responses([single_summary]) do |spy| strategy.summarize(content, user).tap { expect(spy.completions).to eq(1) } end @@ -38,7 +38,7 @@ RSpec.describe DiscourseAi::Summarization::Strategies::FoldContent do content[:contents] << { poster: "asd2", id: 2, text: summarize_text } result = - DiscourseAi::Completions::LLM.with_prepared_responses( + DiscourseAi::Completions::Llm.with_prepared_responses( [single_summary, single_summary, concatenated_summary], ) { |spy| strategy.summarize(content, user).tap { expect(spy.completions).to eq(3) } } diff --git a/spec/requests/ai_helper/assistant_controller_spec.rb b/spec/requests/ai_helper/assistant_controller_spec.rb index 0e0c6173..0f46a202 100644 --- a/spec/requests/ai_helper/assistant_controller_spec.rb +++ b/spec/requests/ai_helper/assistant_controller_spec.rb @@ -57,7 +57,7 @@ RSpec.describe DiscourseAi::AiHelper::AssistantController do end it "returns a generic error when the completion call fails" do - DiscourseAi::Completions::LLM + DiscourseAi::Completions::Llm .any_instance .expects(:completion!) .raises(DiscourseAi::Completions::Endpoints::Base::CompletionFailed) @@ -71,7 +71,7 @@ RSpec.describe DiscourseAi::AiHelper::AssistantController do expected_diff = "

The rain in Spain, spain stays mainly in the Planeplane.

" - DiscourseAi::Completions::LLM.with_prepared_responses([proofreaded_text]) do + DiscourseAi::Completions::Llm.with_prepared_responses([proofreaded_text]) do post "/discourse-ai/ai-helper/suggest", params: { mode: mode, text: text_to_proofread } expect(response.status).to eq(200) diff --git a/spec/system/ai_helper/ai_composer_helper_spec.rb b/spec/system/ai_helper/ai_composer_helper_spec.rb index f157f2c0..2a670b3d 100644 --- a/spec/system/ai_helper/ai_composer_helper_spec.rb +++ b/spec/system/ai_helper/ai_composer_helper_spec.rb @@ -89,7 +89,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do ai_helper_context_menu.click_ai_button ai_helper_context_menu.fill_custom_prompt(custom_prompt_input) - DiscourseAi::Completions::LLM.with_prepared_responses([custom_prompt_response]) do + DiscourseAi::Completions::Llm.with_prepared_responses([custom_prompt_response]) do ai_helper_context_menu.click_custom_prompt_button wait_for { composer.composer_input.value == custom_prompt_response } @@ -119,7 +119,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -132,7 +132,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -146,7 +146,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -161,7 +161,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -175,7 +175,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -189,7 +189,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -210,7 +210,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - 
DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -230,7 +230,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(spanish_input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([input]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == input } @@ -250,7 +250,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do trigger_context_menu(input) ai_helper_context_menu.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([proofread_text]) do + DiscourseAi::Completions::Llm.with_prepared_responses([proofread_text]) do ai_helper_context_menu.select_helper_model(mode) wait_for { composer.composer_input.value == proofread_text } @@ -272,7 +272,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do visit("/latest") page.find("#create-topic").click composer.fill_content(input) - DiscourseAi::Completions::LLM.with_prepared_responses([titles]) do + DiscourseAi::Completions::Llm.with_prepared_responses([titles]) do ai_suggestion_dropdown.click_suggest_titles_button wait_for { ai_suggestion_dropdown.has_dropdown? } @@ -285,7 +285,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do visit("/latest") page.find("#create-topic").click composer.fill_content(input) - DiscourseAi::Completions::LLM.with_prepared_responses([titles]) do + DiscourseAi::Completions::Llm.with_prepared_responses([titles]) do ai_suggestion_dropdown.click_suggest_titles_button wait_for { ai_suggestion_dropdown.has_dropdown? } @@ -302,7 +302,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do page.find("#create-topic").click composer.fill_content(input) - DiscourseAi::Completions::LLM.with_prepared_responses([titles]) do + DiscourseAi::Completions::Llm.with_prepared_responses([titles]) do ai_suggestion_dropdown.click_suggest_titles_button wait_for { ai_suggestion_dropdown.has_dropdown? } diff --git a/spec/system/ai_helper/ai_post_helper_spec.rb b/spec/system/ai_helper/ai_post_helper_spec.rb index a3e7432c..4569554a 100644 --- a/spec/system/ai_helper/ai_post_helper_spec.rb +++ b/spec/system/ai_helper/ai_post_helper_spec.rb @@ -19,9 +19,9 @@ RSpec.describe "AI Composer helper", type: :system, js: true do let(:post_ai_helper) { PageObjects::Components::AIHelperPostOptions.new } let(:explain_response) { <<~STRING } - In this context, \"pie\" refers to a baked dessert typically consisting of a pastry crust and filling. - The person states they enjoy eating pie, considering it a good dessert. They note that some people wastefully - throw pie at others, but the person themselves chooses to eat the pie rather than throwing it. Overall, \"pie\" + In this context, \"pie\" refers to a baked dessert typically consisting of a pastry crust and filling. + The person states they enjoy eating pie, considering it a good dessert. They note that some people wastefully + throw pie at others, but the person themselves chooses to eat the pie rather than throwing it. Overall, \"pie\" is being used to refer the the baked dessert food item. 
STRING @@ -63,7 +63,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do select_post_text(post) post_ai_helper.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([explain_response]) do + DiscourseAi::Completions::Llm.with_prepared_responses([explain_response]) do post_ai_helper.select_helper_model(mode) wait_for { post_ai_helper.suggestion_value == explain_response } @@ -84,7 +84,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do select_post_text(post_2) post_ai_helper.click_ai_button - DiscourseAi::Completions::LLM.with_prepared_responses([translated_input]) do + DiscourseAi::Completions::Llm.with_prepared_responses([translated_input]) do post_ai_helper.select_helper_model(mode) wait_for { post_ai_helper.suggestion_value == translated_input }