discourse-ai/spec/models/completion_prompt_spec.rb
Sam 7c65dd171f
FIX: regression, no longer sending examples to AI helper (#993)
For a while now we have not been sending the examples to the AI
helper, which can lead to inconsistent results.

Note: this also means that in non-English locales we did not send the
English examples, so this change may end up reducing performance there.

That said, the first thing we need to do is fix the regression.
2024-12-03 16:03:46 +11:00
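
For context, here is a minimal sketch (an illustration only, mirroring the spec below, not taken from the source) of what sending the examples means for the final prompt: each [request, response] pair in `examples` is expected to appear as a user/model exchange between the system instructions and the wrapped user input.

# Illustration only, assuming the CompletionPrompt API exercised in the spec below.
cp =
  CompletionPrompt.new(
    messages: {
      insts: "Instructions",
      post_insts: "Post Instructions",
      examples: [["Request 1", "Response 1"]],
    },
  )

cp.messages_with_input("hello").messages
# => [
#      { type: :system, content: "Instructions\nPost Instructions" },
#      { type: :user, content: "Request 1" },      # example request
#      { type: :model, content: "Response 1" },    # example response
#      { type: :user, content: "<input>hello</input>" },
#    ]
# During the regression, the example pair was dropped from what the AI helper sent.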


# frozen_string_literal: true

RSpec.describe CompletionPrompt do
  describe "validations" do
    context "when there are too many messages" do
      it "doesn't accept more than 20 messages" do
        prompt = described_class.new(messages: [{ role: "system", content: "a" }] * 21)

        expect(prompt.valid?).to eq(false)
      end
    end

    context "when the message is over the max length" do
      it "doesn't accept messages when the length is more than 1000 characters" do
        prompt = described_class.new(messages: [{ role: "system", content: "a" * 1001 }])

        expect(prompt.valid?).to eq(false)
      end
    end
  end

  describe "messages_with_input" do
    let(:user_input) { "A user wrote this." }

    context "when mapping to a prompt" do
      it "correctly maps everything to the prompt" do
        cp =
          CompletionPrompt.new(
            messages: {
              insts: "Instructions",
              post_insts: "Post Instructions",
              examples: [["Request 1", "Response 1"]],
            },
          )

        prompt = cp.messages_with_input("hello")

        expected = [
          { type: :system, content: "Instructions\nPost Instructions" },
          { type: :user, content: "Request 1" },
          { type: :model, content: "Response 1" },
          { type: :user, content: "<input>hello</input>" },
        ]

        expect(prompt.messages).to eq(expected)
      end
    end

    context "when the record has the custom_prompt type" do
      let(:custom_prompt) { described_class.find(described_class::CUSTOM_PROMPT) }

      it "wraps the user input with <input> XML tags and adds a custom instruction if given" do
        expected = <<~TEXT.strip
        <input>Translate to Turkish:
        #{user_input}</input>
        TEXT

        custom_prompt.custom_instruction = "Translate to Turkish"
        prompt = custom_prompt.messages_with_input(user_input)

        expect(prompt.messages.last[:content]).to eq(expected)
      end
    end

    context "when the records don't have the custom_prompt type" do
      let(:title_prompt) { described_class.find(described_class::GENERATE_TITLES) }

      it "wraps user input with <input> XML tags" do
        expected = "<input>#{user_input}</input>"

        title_prompt.custom_instruction = "Translate to Turkish"
        prompt = title_prompt.messages_with_input(user_input)

        expect(prompt.messages.last[:content]).to eq(expected)
      end
    end
  end
end