# frozen_string_literal: true

require_relative "../../../support/openai_completions_inference_stubs"

class FakeBot < DiscourseAi::AiBot::Bot
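  # A minimal fake bot: overrides tokenization, the prompt limit, message
  # formatting, and prompt submission so specs can drive replies with canned
  # responses instead of real completion API calls.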
  class Tokenizer
    def tokenize(text)
      text.split(" ")
    end
  end

  def tokenizer
    Tokenizer.new
  end

  def prompt_limit
    10_000
  end

  def build_message(poster_username, content, system: false, function: nil)
    role = poster_username == bot_user.username ? "Assistant" : "Human"

    "#{role}: #{content}"
  end
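
  # Pops the next canned response queued via #add_response and yields each
  # chunk, mimicking a streamed completion.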
  def submit_prompt(prompt, prefer_low_cost: false)
    rows = @responses.shift
    rows.each { |data| yield data, lambda {} }
  end

  def get_delta(partial, context)
    partial
  end

  def add_response(response)
    @responses ||= []
    @responses << response
  end
end

describe FakeBot do
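  # Verifies that when a streamed response contains a command marker ("!tags"),
  # the text after it is dropped from the visible reply and from the stored
  # custom prompt.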
  fab!(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT4_ID) }
  fab!(:post) { Fabricate(:post, raw: "hello world") }

  it "can handle command truncation for long messages" do
    bot = FakeBot.new(bot_user)

    bot.add_response(["hello this is a big test I am testing 123\n", "!tags\nabc"])
    bot.add_response(["this is the reply"])

    bot.reply_to(post)

    reply = post.topic.posts.order(:post_number).last

    expect(reply.raw).not_to include("abc")
    expect(reply.post_custom_prompt.custom_prompt.to_s).not_to include("abc")
    expect(reply.post_custom_prompt.custom_prompt.length).to eq(3)
    expect(reply.post_custom_prompt.custom_prompt[0][0]).to eq(
      "hello this is a big test I am testing 123\n!tags",
    )
  end

  it "can handle command truncation for short bot messages" do
    bot = FakeBot.new(bot_user)

    bot.add_response(["hello\n", "!tags\nabc"])
    bot.add_response(["this is the reply"])

    bot.reply_to(post)

    reply = post.topic.posts.order(:post_number).last

    expect(reply.raw).not_to include("abc")
    expect(reply.post_custom_prompt.custom_prompt.to_s).not_to include("abc")
    expect(reply.post_custom_prompt.custom_prompt.length).to eq(3)
    expect(reply.post_custom_prompt.custom_prompt[0][0]).to eq("hello\n!tags")
  end
end

describe DiscourseAi::AiBot::Bot do
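  # Shared setup: a GPT-4 bot user and a private message between a regular user
  # and the bot, with two user replies to give the bot conversation context.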
  fab!(:bot_user) { User.find(DiscourseAi::AiBot::EntryPoint::GPT4_ID) }
  fab!(:bot) { described_class.as(bot_user) }

  fab!(:user) { Fabricate(:user) }
  fab!(:pm) do
    Fabricate(
      :private_message_topic,
      title: "This is my special PM",
      user: user,
      topic_allowed_users: [
        Fabricate.build(:topic_allowed_user, user: user),
        Fabricate.build(:topic_allowed_user, user: bot_user),
      ],
    )
  end
  fab!(:first_post) { Fabricate(:post, topic: pm, user: user, raw: "This is a reply by the user") }
  fab!(:second_post) do
    Fabricate(:post, topic: pm, user: user, raw: "This is a second reply by the user")
  end
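
  # The system prompt should carry forum metadata (title, description) and the
  # participating user's username.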
  describe "#system_prompt" do
    it "includes relevant context in system prompt" do
      bot.system_prompt_style!(:standard)

      SiteSetting.title = "My Forum"
      SiteSetting.site_description = "My Forum Description"

      system_prompt = bot.system_prompt(second_post)

      expect(system_prompt).to include(SiteSetting.title)
      expect(system_prompt).to include(SiteSetting.site_description)

      expect(system_prompt).to include(user.username)
    end
  end

  describe "#reply_to" do
    it "can respond to a search command" do
      bot.system_prompt_style!(:simple)

      expected_response = {
        function_call: {
          name: "search",
          arguments: { query: "test search" }.to_json,
        },
      }

      prompt = bot.bot_prompt_with_topic_context(second_post)

      req_opts = bot.reply_params.merge({ functions: bot.available_functions, stream: true })

      OpenAiCompletionsInferenceStubs.stub_streamed_response(
        prompt,
        [expected_response],
        model: bot.model_for,
        req_opts: req_opts,
      )

      result =
        DiscourseAi::AiBot::Commands::SearchCommand
          .new(bot_user: nil, args: nil)
          .process(query: "test search")
          .to_json

      prompt << { role: "function", content: result, name: "search" }

      OpenAiCompletionsInferenceStubs.stub_streamed_response(
        prompt,
        [content: "I found nothing, sorry"],
        model: bot.model_for,
        req_opts: req_opts,
      )

      bot.reply_to(second_post)

      last = second_post.topic.posts.order("id desc").first

      expect(last.raw).to include("<details>")
      expect(last.raw).to include("<summary>Search</summary>")
      expect(last.raw).not_to include("translation missing")
      expect(last.raw).to include("I found nothing")

      expect(last.post_custom_prompt.custom_prompt).to eq(
        [[result, "search", "function"], ["I found nothing, sorry", bot_user.username]],
      )
    end
  end

  describe "#update_pm_title" do
    let(:expected_response) { "This is a suggested title" }

    before { SiteSetting.min_personal_message_post_length = 5 }

    it "updates the title using bot suggestions" do
      OpenAiCompletionsInferenceStubs.stub_response(
        bot.title_prompt(second_post),
        expected_response,
        model: bot.model_for,
        req_opts: {
          temperature: 0.7,
          top_p: 0.9,
          max_tokens: 40,
        },
      )

      bot.update_pm_title(second_post)

      expect(pm.reload.title).to eq(expected_response)
    end
  end
end