discourse-ai/spec/lib/modules/ai_bot/tools/summarize_spec.rb
Roman Rizzi 8d5f901a67
DEV: Rewire AI bot internals to use LlmModel (#638)
* DRAFT: Create AI Bot users dynamically and support custom LlmModels

* Get user associated to llm_model

* Track enabled bots with attribute

* Don't store bot username. Minor touches to migrate default values in settings

* Handle scenario where vLLM uses a SRV record

* Made 3.5-turbo-16k the default version so we can remove hack
2024-06-18 14:32:14 -03:00

55 lines
1.7 KiB
Ruby

# frozen_string_literal: true

RSpec.describe DiscourseAi::AiBot::Tools::Summarize do
  # Bot identity and LLM proxy the tool is constructed with in every example.
  let(:bot_user) { DiscourseAi::AiBot::EntryPoint.find_user_from_model("gpt-3.5-turbo") }
  let(:llm) { DiscourseAi::Completions::Llm.proxy("open_ai:gpt-3.5-turbo") }
  # No-op progress callback; the tool yields partial output into it.
  let(:progress_blk) { proc {} }
  let(:summary) { "summary stuff" }

  before { SiteSetting.ai_bot_enabled = true }

  describe "#process" do
    it "can generate correct info" do
      post = Fabricate(:post)

      # Stub the LLM so it replies with our canned summary.
      DiscourseAi::Completions::Llm.with_prepared_responses([summary]) do
        tool =
          described_class.new(
            { topic_id: post.topic_id, guidance: "why did it happen?" },
            bot_user: bot_user,
            llm: llm,
          )

        result = tool.invoke(&progress_blk)

        expect(result).to include("Topic summarized")
        expect(tool.custom_raw).to include(summary)
        expect(tool.chain_next_response?).to eq(false)
      end
    end

    it "protects hidden data" do
      # A category with no permissions at all — invisible to the bot user.
      restricted_category = Fabricate(:category)
      restricted_category.set_permissions({})
      restricted_category.save!

      topic = Fabricate(:topic, category_id: restricted_category.id)
      post = Fabricate(:post, topic: topic)

      DiscourseAi::Completions::Llm.with_prepared_responses([summary]) do
        tool =
          described_class.new(
            { topic_id: post.topic_id, guidance: "why did it happen?" },
            bot_user: bot_user,
            llm: llm,
          )

        result = tool.invoke(&progress_blk)

        # The hidden topic's content must never leak into the tool output.
        expect(result).not_to include(post.raw)
        expect(tool.custom_raw).to eq(I18n.t("discourse_ai.ai_bot.topic_not_found"))
      end
    end
  end
end