+ "<div class=\"inline-diff\"><p>The rain in <ins>Spain</ins><ins>,</ins><ins> </ins><del>spain </del>stays mainly in the <ins>Plane</ins><del>plane</del>.</p></div>"
- post "/discourse-ai/ai-helper/suggest", params: { mode: mode, text: text }
+ DiscourseAi::Completions::LLM.with_prepared_responses([proofreaded_text]) do
+ post "/discourse-ai/ai-helper/suggest", params: { mode: mode, text: text_to_proofread }
- expect(response.status).to eq(200)
- expect(response.parsed_body["suggestions"].first).to eq(
- OpenAiCompletionsInferenceStubs.proofread_response.strip,
- )
+ expect(response.status).to eq(200)
+ expect(response.parsed_body["suggestions"].first).to eq(proofreaded_text)
+ expect(response.parsed_body["diff"]).to eq(expected_diff)
+ end
end
end
end
diff --git a/spec/system/ai_helper/ai_composer_helper_spec.rb b/spec/system/ai_helper/ai_composer_helper_spec.rb
index c0f265a3..db286741 100644
--- a/spec/system/ai_helper/ai_composer_helper_spec.rb
+++ b/spec/system/ai_helper/ai_composer_helper_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
sign_in(user)
end
+ let(:input) { "The rain in spain stays mainly in the Plane." }
+
let(:composer) { PageObjects::Components::Composer.new }
let(:ai_helper_context_menu) { PageObjects::Components::AIHelperContextMenu.new }
let(:diff_modal) { PageObjects::Modals::DiffModal.new }
@@ -31,14 +33,14 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
context "when triggering AI with context menu in composer" do
it "shows the context menu when selecting a passage of text in the composer" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
expect(ai_helper_context_menu).to have_context_menu
end
it "does not show the context menu when selecting insuffient text" do
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
+ composer.fill_content(input)
page.execute_script(
"const input = document.querySelector('.d-editor-input'); input.setSelectionRange(0, 2);",
)
@@ -46,283 +48,269 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
end
it "shows context menu in 'trigger' state when first showing" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
expect(ai_helper_context_menu).to be_showing_triggers
end
it "shows prompt options in context menu when AI button is clicked" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
ai_helper_context_menu.click_ai_button
expect(ai_helper_context_menu).to be_showing_options
end
it "closes the context menu when clicking outside" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
find(".d-editor-preview").click
expect(ai_helper_context_menu).to have_no_context_menu
end
context "when using custom prompt" do
- let(:mode) { OpenAiCompletionsInferenceStubs::CUSTOM_PROMPT }
- before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }
+ let(:mode) { CompletionPrompt::CUSTOM_PROMPT }
+
+ let(:custom_prompt_input) { "Translate to French" }
+ let(:custom_prompt_response) { "La pluie en Espagne reste principalement dans l'avion." }
it "shows custom prompt option" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
ai_helper_context_menu.click_ai_button
expect(ai_helper_context_menu).to have_custom_prompt
end
it "enables the custom prompt button when input is filled" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
ai_helper_context_menu.click_ai_button
expect(ai_helper_context_menu).to have_custom_prompt_button_disabled
- ai_helper_context_menu.fill_custom_prompt(
- OpenAiCompletionsInferenceStubs.custom_prompt_input,
- )
+ ai_helper_context_menu.fill_custom_prompt(custom_prompt_input)
expect(ai_helper_context_menu).to have_custom_prompt_button_enabled
end
it "replaces the composed message with AI generated content" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.fill_custom_prompt(
- OpenAiCompletionsInferenceStubs.custom_prompt_input,
- )
- ai_helper_context_menu.click_custom_prompt_button
+ ai_helper_context_menu.fill_custom_prompt(custom_prompt_input)
- wait_for do
- composer.composer_input.value ==
- OpenAiCompletionsInferenceStubs.custom_prompt_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([custom_prompt_response]) do
+ ai_helper_context_menu.click_custom_prompt_button
+
+ wait_for { composer.composer_input.value == custom_prompt_response }
+
+ expect(composer.composer_input.value).to eq(custom_prompt_response)
end
-
- expect(composer.composer_input.value).to eq(
- OpenAiCompletionsInferenceStubs.custom_prompt_response.strip,
- )
end
end
context "when not a member of custom prompt group" do
- let(:mode) { OpenAiCompletionsInferenceStubs::CUSTOM_PROMPT }
+ let(:mode) { CompletionPrompt::CUSTOM_PROMPT }
before { SiteSetting.ai_helper_custom_prompts_allowed_groups = non_member_group.id.to_s }
it "does not show custom prompt option" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
ai_helper_context_menu.click_ai_button
expect(ai_helper_context_menu).to have_no_custom_prompt
end
end
context "when using translation mode" do
- let(:mode) { OpenAiCompletionsInferenceStubs::TRANSLATE }
- before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }
+ let(:mode) { CompletionPrompt::TRANSLATE }
+
+ let(:spanish_input) { "La lluvia en España se queda principalmente en el avión." }
it "replaces the composed message with AI generated content" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ expect(composer.composer_input.value).to eq(input)
end
-
- expect(composer.composer_input.value).to eq(
- OpenAiCompletionsInferenceStubs.translated_response.strip,
- )
end
it "shows reset options after results are complete" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ ai_helper_context_menu.click_confirm_button
+ expect(ai_helper_context_menu).to be_showing_resets
end
-
- ai_helper_context_menu.click_confirm_button
- expect(ai_helper_context_menu).to be_showing_resets
end
it "reverts results when Undo button is clicked" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ ai_helper_context_menu.click_confirm_button
+ ai_helper_context_menu.click_undo_button
+ expect(composer.composer_input.value).to eq(spanish_input)
end
-
- ai_helper_context_menu.click_confirm_button
- ai_helper_context_menu.click_undo_button
- expect(composer.composer_input.value).to eq(OpenAiCompletionsInferenceStubs.spanish_text)
end
it "reverts results when revert button is clicked" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ ai_helper_context_menu.click_revert_button
+ expect(composer.composer_input.value).to eq(spanish_input)
end
-
- ai_helper_context_menu.click_revert_button
- expect(composer.composer_input.value).to eq(OpenAiCompletionsInferenceStubs.spanish_text)
end
it "reverts results when Ctrl/Cmd + Z is pressed on the keyboard" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ ai_helper_context_menu.press_undo_keys
+ expect(composer.composer_input.value).to eq(spanish_input)
end
-
- ai_helper_context_menu.press_undo_keys
- expect(composer.composer_input.value).to eq(OpenAiCompletionsInferenceStubs.spanish_text)
end
it "confirms the results when confirm button is pressed" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ ai_helper_context_menu.click_confirm_button
+ expect(composer.composer_input.value).to eq(input)
end
-
- ai_helper_context_menu.click_confirm_button
- expect(composer.composer_input.value).to eq(
- OpenAiCompletionsInferenceStubs.translated_response.strip,
- )
end
it "hides the context menu when pressing Escape on the keyboard" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
ai_helper_context_menu.press_escape_key
expect(ai_helper_context_menu).to have_no_context_menu
end
it "shows the changes in a modal when view changes button is pressed" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ ai_helper_context_menu.click_view_changes_button
+ expect(diff_modal).to be_visible
+ expect(diff_modal.old_value).to eq(spanish_input.gsub(/[[:space:]]+/, " ").strip)
+ expect(diff_modal.new_value).to eq(
+ input.gsub(/[[:space:]]+/, " ").gsub(/[‘’]/, "'").gsub(/[“”]/, '"').strip,
+ )
+ diff_modal.confirm_changes
+ expect(ai_helper_context_menu).to be_showing_resets
end
-
- ai_helper_context_menu.click_view_changes_button
- expect(diff_modal).to be_visible
- expect(diff_modal.old_value).to eq(
- OpenAiCompletionsInferenceStubs.spanish_text.gsub(/[[:space:]]+/, " ").strip,
- )
- expect(diff_modal.new_value).to eq(
- OpenAiCompletionsInferenceStubs
- .translated_response
- .gsub(/[[:space:]]+/, " ")
- .gsub(/[‘’]/, "'")
- .gsub(/[“”]/, '"')
- .strip,
- )
- diff_modal.confirm_changes
- expect(ai_helper_context_menu).to be_showing_resets
end
it "should not close the context menu when in review state" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.spanish_text)
+ trigger_context_menu(spanish_input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([input]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == input }
+
+ find(".d-editor-preview").click
+ expect(ai_helper_context_menu).to have_context_menu
end
-
- find(".d-editor-preview").click
- expect(ai_helper_context_menu).to have_context_menu
end
end
context "when using the proofreading mode" do
- let(:mode) { OpenAiCompletionsInferenceStubs::PROOFREAD }
- before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }
+ let(:mode) { CompletionPrompt::PROOFREAD }
+
+ let(:proofread_text) { "The rain in Spain, stays mainly in the Plane." }
it "replaces the composed message with AI generated content" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
ai_helper_context_menu.click_ai_button
- ai_helper_context_menu.select_helper_model(
- OpenAiCompletionsInferenceStubs.text_mode_to_id(mode),
- )
- wait_for do
- composer.composer_input.value == OpenAiCompletionsInferenceStubs.proofread_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([proofread_text]) do
+ ai_helper_context_menu.select_helper_model(mode)
+
+ wait_for { composer.composer_input.value == proofread_text }
+
+ expect(composer.composer_input.value).to eq(proofread_text)
end
-
- expect(composer.composer_input.value).to eq(
- OpenAiCompletionsInferenceStubs.proofread_response.strip,
- )
end
end
end
context "when suggesting titles with AI title suggester" do
- let(:mode) { OpenAiCompletionsInferenceStubs::GENERATE_TITLES }
- before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }
+ let(:mode) { CompletionPrompt::GENERATE_TITLES }
+
+ let(:titles) do
+ "Rainy Spain*Plane-Bound Delights*Mysterious Spain*Plane-Rain Chronicles*Unveiling Spain"
+ end
it "opens a menu with title suggestions" do
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
- ai_suggestion_dropdown.click_suggest_titles_button
+ composer.fill_content(input)
+ DiscourseAi::Completions::LLM.with_prepared_responses([titles]) do
+ ai_suggestion_dropdown.click_suggest_titles_button
- wait_for { ai_suggestion_dropdown.has_dropdown? }
+ wait_for { ai_suggestion_dropdown.has_dropdown? }
- expect(ai_suggestion_dropdown).to have_dropdown
+ expect(ai_suggestion_dropdown).to have_dropdown
+ end
end
it "replaces the topic title with the selected title" do
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
- ai_suggestion_dropdown.click_suggest_titles_button
+ composer.fill_content(input)
+ DiscourseAi::Completions::LLM.with_prepared_responses([titles]) do
+ ai_suggestion_dropdown.click_suggest_titles_button
- wait_for { ai_suggestion_dropdown.has_dropdown? }
+ wait_for { ai_suggestion_dropdown.has_dropdown? }
- ai_suggestion_dropdown.select_suggestion_by_value(2)
- expected_title = "The Quiet Piece that Moves Literature: A Gaucho's Story"
+ ai_suggestion_dropdown.select_suggestion_by_value(1)
+ expected_title = "Plane-Bound Delights"
- expect(find("#reply-title").value).to eq(expected_title)
+ expect(find("#reply-title").value).to eq(expected_title)
+ end
end
it "closes the menu when clicking outside" do
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
- ai_suggestion_dropdown.click_suggest_titles_button
+ composer.fill_content(input)
- wait_for { ai_suggestion_dropdown.has_dropdown? }
+ DiscourseAi::Completions::LLM.with_prepared_responses([titles]) do
+ ai_suggestion_dropdown.click_suggest_titles_button
- find(".d-editor-preview").click
+ wait_for { ai_suggestion_dropdown.has_dropdown? }
- expect(ai_suggestion_dropdown).to have_no_dropdown
+ find(".d-editor-preview").click
+
+ expect(ai_suggestion_dropdown).to have_no_dropdown
+ end
end
it "only shows trigger button if there is sufficient content in the composer" do
@@ -332,7 +320,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
expect(ai_suggestion_dropdown).to have_no_suggestion_button
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
+ composer.fill_content(input)
expect(ai_suggestion_dropdown).to have_suggestion_button
end
end
@@ -350,7 +338,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
DiscourseAi::AiHelper::SemanticCategorizer.any_instance.stubs(:categories).returns(response)
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
+ composer.fill_content(input)
ai_suggestion_dropdown.click_suggest_category_button
wait_for { ai_suggestion_dropdown.has_dropdown? }
@@ -376,7 +364,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
+ composer.fill_content(input)
ai_suggestion_dropdown.click_suggest_tags_button
@@ -391,41 +379,35 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
end
context "when AI helper is disabled" do
- let(:mode) { OpenAiCompletionsInferenceStubs::GENERATE_TITLES }
- before do
- OpenAiCompletionsInferenceStubs.stub_prompt(mode)
- SiteSetting.composer_ai_helper_enabled = false
- end
+ let(:mode) { CompletionPrompt::GENERATE_TITLES }
+ before { SiteSetting.composer_ai_helper_enabled = false }
it "does not trigger AI context menu" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
expect(ai_helper_context_menu).to have_no_context_menu
end
it "does not trigger AI suggestion buttons" do
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
+ composer.fill_content(input)
expect(ai_suggestion_dropdown).to have_no_suggestion_button
end
end
context "when user is not a member of AI helper allowed group" do
- let(:mode) { OpenAiCompletionsInferenceStubs::GENERATE_TITLES }
- before do
- OpenAiCompletionsInferenceStubs.stub_prompt(mode)
- SiteSetting.ai_helper_allowed_groups = non_member_group.id.to_s
- end
+ let(:mode) { CompletionPrompt::GENERATE_TITLES }
+ before { SiteSetting.ai_helper_allowed_groups = non_member_group.id.to_s }
it "does not trigger AI context menu" do
- trigger_context_menu(OpenAiCompletionsInferenceStubs.translated_response)
+ trigger_context_menu(input)
expect(ai_helper_context_menu).to have_no_context_menu
end
it "does not trigger AI suggestion buttons" do
visit("/latest")
page.find("#create-topic").click
- composer.fill_content(OpenAiCompletionsInferenceStubs.translated_response)
+ composer.fill_content(input)
expect(ai_suggestion_dropdown).to have_no_suggestion_button
end
end
diff --git a/spec/system/ai_helper/ai_post_helper_spec.rb b/spec/system/ai_helper/ai_post_helper_spec.rb
index 81038d4d..a3e7432c 100644
--- a/spec/system/ai_helper/ai_post_helper_spec.rb
+++ b/spec/system/ai_helper/ai_post_helper_spec.rb
@@ -13,11 +13,18 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
)
end
fab!(:post_2) do
- Fabricate(:post, topic: topic, raw: OpenAiCompletionsInferenceStubs.spanish_text)
+ Fabricate(:post, topic: topic, raw: "La lluvia en España se queda principalmente en el avión.")
end
let(:topic_page) { PageObjects::Pages::Topic.new }
let(:post_ai_helper) { PageObjects::Components::AIHelperPostOptions.new }
+ let(:explain_response) { <<~STRING }
+ In this context, \"pie\" refers to a baked dessert typically consisting of a pastry crust and filling.
+ The person states they enjoy eating pie, considering it a good dessert. They note that some people wastefully
+ throw pie at others, but the person themselves chooses to eat the pie rather than throwing it. Overall, \"pie\"
+ is being used to refer to the baked dessert food item.
+ STRING
+
before do
Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
SiteSetting.composer_ai_helper_enabled = true
@@ -50,44 +57,40 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
context "when using explain mode" do
skip "TODO: Fix explain mode option not appearing in spec" do
- let(:mode) { OpenAiCompletionsInferenceStubs::EXPLAIN }
- before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }
+ let(:mode) { CompletionPrompt::EXPLAIN }
it "shows an explanation of the selected text" do
select_post_text(post)
post_ai_helper.click_ai_button
- post_ai_helper.select_helper_model(OpenAiCompletionsInferenceStubs.text_mode_to_id(mode))
- wait_for do
- post_ai_helper.suggestion_value ==
- OpenAiCompletionsInferenceStubs.explain_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([explain_response]) do
+ post_ai_helper.select_helper_model(mode)
+
+ wait_for { post_ai_helper.suggestion_value == explain_response }
+
+ expect(post_ai_helper.suggestion_value).to eq(explain_response)
end
-
- expect(post_ai_helper.suggestion_value).to eq(
- OpenAiCompletionsInferenceStubs.explain_response.strip,
- )
end
end
end
context "when using translate mode" do
skip "TODO: Fix WebMock request for translate mode not working" do
- let(:mode) { OpenAiCompletionsInferenceStubs::TRANSLATE }
- before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }
+ let(:mode) { CompletionPrompt::TRANSLATE }
+
+ let(:translated_input) { "The rain in Spain, stays mainly in the Plane." }
it "shows a translation of the selected text" do
select_post_text(post_2)
post_ai_helper.click_ai_button
- post_ai_helper.select_helper_model(OpenAiCompletionsInferenceStubs.text_mode_to_id(mode))
- wait_for do
- post_ai_helper.suggestion_value ==
- OpenAiCompletionsInferenceStubs.translated_response.strip
+ DiscourseAi::Completions::LLM.with_prepared_responses([translated_input]) do
+ post_ai_helper.select_helper_model(mode)
+
+ wait_for { post_ai_helper.suggestion_value == translated_input }
+
+ expect(post_ai_helper.suggestion_value).to eq(translated_input)
end
-
- expect(post_ai_helper.suggestion_value).to eq(
- OpenAiCompletionsInferenceStubs.translated_response.strip,
- )
end
end
end