discourse-ai/spec/lib/modules/ai_helper/open_ai_prompt_spec.rb
Roman Rizzi 320ac6e84b
REFACTOR: Store prompts in a dedicated table. (#14)
This change makes it easier to add new prompts to our AI helper. We don't have a UI for it yet. You'll have to do it through a console.
2023-03-17 15:14:19 -03:00

67 lines
1.9 KiB
Ruby

# frozen_string_literal: true
require_relative "../../../support/openai_completions_inference_stubs"
RSpec.describe DiscourseAi::AiHelper::OpenAiPrompt do
  # Each context sets `mode`; the prompt record is looked up lazily from it.
  let(:prompt) { CompletionPrompt.find_by(name: mode) }

  describe "#generate_and_send_prompt" do
    # Every example stubs the completion endpoint for its mode, so the hook
    # is shared at this level instead of repeated per context.
    before { OpenAiCompletionsInferenceStubs.stub_prompt(mode) }

    context "when using the translate mode" do
      let(:mode) { "translate" }

      it "Sends the prompt to chatGPT and returns the response" do
        input_text = OpenAiCompletionsInferenceStubs.spanish_text

        result = subject.generate_and_send_prompt(prompt, input_text)

        expect(result[:suggestions]).to contain_exactly(
          OpenAiCompletionsInferenceStubs.translated_response.strip,
        )
      end
    end

    context "when using the proofread mode" do
      let(:mode) { "proofread" }

      it "Sends the prompt to chatGPT and returns the response" do
        input_text = OpenAiCompletionsInferenceStubs.translated_response

        result = subject.generate_and_send_prompt(prompt, input_text)

        expect(result[:suggestions]).to contain_exactly(
          OpenAiCompletionsInferenceStubs.proofread_response.strip,
        )
      end
    end

    context "when generating titles" do
      let(:mode) { "generate_titles" }

      it "returns an array with each title" do
        # Strip quotes and leading list numbers from the stubbed titles to
        # build the expected suggestions.
        # NOTE(review): the dot in /\d./ is unescaped, so it matches a digit
        # followed by ANY character (e.g. inside "2023"), not just "1." style
        # prefixes — presumably mirrors the implementation's parsing; confirm.
        expected_titles =
          OpenAiCompletionsInferenceStubs
            .generated_titles
            .gsub("\"", "")
            .gsub(/\d./, "")
            .split("\n")
            .map(&:strip)

        result =
          subject.generate_and_send_prompt(
            prompt,
            OpenAiCompletionsInferenceStubs.translated_response,
          )

        expect(result[:suggestions]).to contain_exactly(*expected_titles)
      end
    end
  end
end