discourse-ai/spec/requests/summarization/chat_summary_controller_spec.rb
Sam 1320eed9b2
FEATURE: move summary to use llm_model (#699)
This allows summarization to use the new LLM models and migrates off API-key-based model selection.

Claude 3.5, etc., all work now.

---------

Co-authored-by: Roman Rizzi <rizziromanalejandro@gmail.com>
2024-07-04 10:48:18 +10:00
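As a rough sketch of what the llm_model-based setup looks like in a spec (the assign_fake_provider_to helper used in the file below does the equivalent in one call), assuming the plugin ships an :llm_model fabricator and a post-migration "custom:<id>" setting format:

# Illustrative only: point summarization at an LlmModel record rather than an
# API-key based site setting. The "custom:#{id}" identifier format and the
# :llm_model fabricator are assumptions about the plugin's test helpers.
llm_model = Fabricate(:llm_model)
SiteSetting.ai_summarization_model = "custom:#{llm_model.id}"
SiteSetting.ai_summarization_enabled = true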


# frozen_string_literal: true

RSpec.describe DiscourseAi::Summarization::ChatSummaryController do
  fab!(:current_user) { Fabricate(:user) }
  fab!(:group)

  before do
    group.add(current_user)

    assign_fake_provider_to(:ai_summarization_model)
    SiteSetting.ai_summarization_enabled = true
    SiteSetting.ai_custom_summarization_allowed_groups = group.id

    SiteSetting.chat_enabled = true
    SiteSetting.chat_allowed_groups = group.id

    sign_in(current_user)
  end

  describe "#show" do
    context "when the user is not allowed to join the channel" do
      fab!(:channel) { Fabricate(:private_category_channel) }

      it "returns a 403" do
        get "/discourse-ai/summarization/channels/#{channel.id}", params: { since: 6 }

        expect(response.status).to eq(403)
      end
    end
  end
end
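The committed spec only covers the channel-access failure path. A minimal sketch of a companion case one might add inside the "#show" block, assuming the controller also rejects users outside ai_custom_summarization_allowed_groups with a 403, and that the chat plugin's :chat_channel fabricator and Chat::Channel#add are available:

# Sketch only, not part of the committed spec: uses a channel the user can
# access, so any 403 would come from the summarization group check rather
# than from channel access.
context "when the user is not in the summarization allowed groups" do
  fab!(:channel) { Fabricate(:chat_channel) }

  before do
    channel.add(current_user)
    SiteSetting.ai_custom_summarization_allowed_groups = ""
  end

  it "returns a 403" do
    get "/discourse-ai/summarization/channels/#{channel.id}", params: { since: 6 }

    expect(response.status).to eq(403)
  end
end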