discourse-ai/lib/translation/base_translator.rb
Sam 471f96f972
FEATURE: allow seeing configured LLM on feature page (#1460)
This is an interim fix so we can at least tell what feature is
being used for what LLM.

It also adds some test coverage to the feature page.
2025-06-24 17:42:47 +10:00

70 lines
2.0 KiB
Ruby

# frozen_string_literal: true
module DiscourseAi
  module Translation
    # Abstract base for LLM-backed translators. Subclasses provide
    # #persona_setting to choose which AI persona performs the translation.
    class BaseTranslator
      # @param text [String] the content to translate
      # @param target_locale [String] locale code to translate into
      # @param topic [Topic, nil] optional topic passed to the bot context
      # @param post [Post, nil] optional post passed to the bot context
      def initialize(text:, target_locale:, topic: nil, post: nil)
        @text = text
        @target_locale = target_locale
        @topic = topic
        @post = post
      end

      # Translates @text into @target_locale using the configured persona and
      # LLM. Returns nil when the feature is disabled or when no persona/model
      # is configured. Content is split into chunks sized to the model's max
      # output tokens, translated chunk by chunk, and concatenated.
      def translate
        return nil if !SiteSetting.ai_translation_enabled

        ai_persona = AiPersona.find_by(id: persona_setting)
        return nil if ai_persona.blank?

        persona_klass = ai_persona.class_instance
        model = self.class.preferred_llm_model(persona_klass)
        return nil if model.blank?

        # The persona's own user is preferred; fall back to the system user.
        translation_user = ai_persona.user || Discourse.system_user
        bot =
          DiscourseAi::Personas::Bot.as(
            translation_user,
            persona: persona_klass.new,
            model: model,
          )

        chunks = ContentSplitter.split(content: @text, chunk_size: model.max_output_tokens)
        chunks.map { |chunk| get_translation(text: chunk, bot:, translation_user:) }.join
      end

      # Resolves the LLM for a persona class: the persona's default LLM wins,
      # otherwise fall back to the site-wide translation model setting
      # (stored as "<prefix>:<id>", hence the split on ":").
      def self.preferred_llm_model(persona_klass)
        model_id = persona_klass.default_llm_id
        model_id ||= SiteSetting.ai_translation_model&.split(":")&.last
        model_id.blank? ? nil : LlmModel.find_by(id: model_id)
      end

      private

      # JSON payload handed to the persona: the chunk plus the target locale.
      def formatted_content(content)
        JSON.generate({ content:, target_locale: @target_locale })
      end

      # Runs one chunk through the bot and pulls the structured "translation"
      # property out of the streamed reply.
      def get_translation(text:, bot:, translation_user:)
        context =
          DiscourseAi::Personas::BotContext.new(
            user: translation_user,
            skip_tool_details: true,
            feature_name: "translation",
            messages: [{ type: :user, content: formatted_content(text) }],
            topic: @topic,
            post: @post,
          )

        structured_output = nil
        bot.reply(context) do |partial, _, type|
          structured_output = partial if type == :structured_output
        end

        structured_output&.read_buffered_property(:translation)
      end

      # Subclasses must return the configured persona id for this feature.
      def persona_setting
        raise NotImplementedError
      end
    end
  end
end