Mirror of https://github.com/discourse/discourse-ai.git, synced 2025-07-01 20:12:15 +00:00.
Related: https://github.com/discourse/discourse-translator/pull/310. This commit includes all the jobs and event hooks to localize posts, topics, and categories. A few notes: `feature_name: "translation"` is used because the site setting is `ai-translation` and the module is `Translation`; we will switch to a proper ai-feature in the near future, and can consider using the persona_user as `localization.localizer_user_id`. We are keeping things flat within the module for now, as we will be moving to ai-feature soon and will have to rearrange. Settings renamed or introduced are: ai_translation_backfill_rate (0), ai_translation_backfill_limit_to_public_content (true), ai_translation_backfill_max_age_days (5), and ai_translation_verbose_logs (false).
66 lines · 2.3 KiB · Ruby
# frozen_string_literal: true

require "rails_helper"

# Specs for the translation entry point shared by translator subclasses.
# Exercised here through PostRawTranslator, a concrete subclass.
describe DiscourseAi::Translation::BaseTranslator do
  before do
    # Point the ai_translation_model site setting at a freshly fabricated
    # fake LLM so no real model is contacted during the specs.
    Fabricate(:fake_model).tap do |fake_llm|
      SiteSetting.public_send("ai_translation_model=", "custom:#{fake_llm.id}")
    end
  end

  describe ".translate" do
    let(:text_to_translate) { "cats are great" }
    let(:target_language) { "de" }
    let(:llm_response) { "hur dur hur dur!" }

    it "creates the correct prompt" do
      post_translator =
        DiscourseAi::Translation::PostRawTranslator.new(text_to_translate, target_language)

      # Verify the prompt is built from the subclass's template and the
      # translator's formatted content, while still constructing a real Prompt.
      allow(DiscourseAi::Completions::Prompt).to receive(:new).with(
        DiscourseAi::Translation::PostRawTranslator::PROMPT_TEMPLATE,
        messages: [{ type: :user, content: post_translator.formatted_content, id: "user" }],
      ).and_call_original

      DiscourseAi::Completions::Llm.with_prepared_responses([llm_response]) do
        post_translator.translate
      end
    end

    it "sends the translation prompt to the selected ai helper model" do
      mock_prompt = instance_double(DiscourseAi::Completions::Prompt)
      mock_llm = instance_double(DiscourseAi::Completions::Llm)
      post_translator =
        DiscourseAi::Translation::PostRawTranslator.new(text_to_translate, target_language)

      # Structured output the mocked LLM will hand back to the translator.
      structured_output =
        DiscourseAi::Completions::StructuredOutput.new({ translation: { type: "string" } })
      structured_output << { translation: llm_response }.to_json

      allow(DiscourseAi::Completions::Prompt).to receive(:new).and_return(mock_prompt)
      # The LLM proxy must be resolved from the configured site setting.
      allow(DiscourseAi::Completions::Llm).to receive(:proxy).with(
        SiteSetting.ai_translation_model,
      ).and_return(mock_llm)
      # Generation must run as the system user, tagged with the "translation"
      # feature name, and request the translator's response format.
      allow(mock_llm).to receive(:generate).with(
        mock_prompt,
        user: Discourse.system_user,
        feature_name: "translation",
        response_format: post_translator.response_format,
      ).and_return(structured_output)

      post_translator.translate
    end

    it "returns the translation from the llm's response" do
      DiscourseAi::Completions::Llm.with_prepared_responses([llm_response]) do
        expect(
          DiscourseAi::Translation::PostRawTranslator.new(
            text_to_translate,
            target_language,
          ).translate,
        ).to eq "hur dur hur dur!"
      end
    end
  end
end
|