discourse-ai/spec/lib/modules/ai_helper/entry_point_spec.rb
Roman Rizzi 8849caf136
DEV: Transition "Select model" settings to only use LlmModels (#675)
We no longer support the "provider:model" format in the "ai_helper_model" and
"ai_embeddings_semantic_search_hyde_model" settings. We'll migrate existing
values and work with our new data-driven LLM configs from now on.
2024-06-19 18:01:35 -03:00

40 lines
1.4 KiB
Ruby

# frozen_string_literal: true
describe DiscourseAi::AiHelper::EntryPoint do
  fab!(:default_locale_user) { Fabricate(:user) }
  fab!(:fr_locale_user) { Fabricate(:user, locale: "fr") }

  it "will correctly localize available prompts" do
    # The helper requires a configured LLM; use the spec fake provider.
    assign_fake_provider_to(:ai_helper_model)

    SiteSetting.default_locale = "en"
    SiteSetting.allow_user_locale = true
    SiteSetting.composer_ai_helper_enabled = true
    SiteSetting.ai_helper_allowed_groups = "10" # tl0

    # Drop any prompts cached under a previous locale and make sure the
    # tl0 automatic group membership is up to date before serializing.
    DiscourseAi::AiHelper::Assistant.clear_prompt_cache!
    Group.refresh_automatic_groups!

    # Serializes the current-user payload for the given user and returns
    # the "translate" helper prompt entry from it.
    find_translate_prompt =
      lambda do |user|
        payload = JSON.parse(CurrentUserSerializer.new(user, scope: Guardian.new(user)).to_json)
        payload["current_user"]["ai_helper_prompts"].find { |prompt| prompt["name"] == "translate" }
      end

    # Default-locale user sees the prompt name translated via the site locale.
    expect(find_translate_prompt.call(default_locale_user)["translated_name"]).to eq(
      I18n.t("discourse_ai.ai_helper.prompts.translate"),
    )

    # A user with a French locale gets the French translation instead.
    I18n.with_locale("fr") do
      expect(find_translate_prompt.call(fr_locale_user)["translated_name"]).to eq(
        I18n.t("discourse_ai.ai_helper.prompts.translate", locale: "fr"),
      )
    end
  end
end