Mirror of https://github.com/discourse/discourse-ai.git, synced 2025-08-01 10:53:27 +00:00
This commit introduces a new Forum Researcher persona specialized in deep forum content analysis, along with broader improvements to our AI infrastructure.

Key additions:
- New Forum Researcher persona with advanced filtering and analysis capabilities
- Robust filtering system supporting tags, categories, dates, users, and keywords
- LLM formatter to efficiently process and chunk research results

Infrastructure improvements:
- Implemented CancelManager class to centrally manage AI completion cancellations
- Replaced callback-based cancellation with a more robust pattern
- Added systematic cancellation monitoring with callbacks

Other improvements:
- Added configurable default_enabled flag to control which personas are enabled by default
- Updated translation strings for the new researcher functionality
- Added comprehensive specs for the new components
- Renamed Researcher -> Web Researcher

This change makes our AI platform more stable while adding powerful research capabilities that can analyze forum trends and surface relevant content.
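For orientation, the spec below pins down the CancelManager surface: cancel!, cancelled?, start_monitor(delay:), stop_monitor, monitor_thread, a callbacks registry, and a cancel_manager: argument to llm.generate. The following is a minimal sketch of that cancellation pattern; only the method names just listed come from the spec, while the class name, the add_callback helper, and all internals are illustrative assumptions rather than the actual DiscourseAi::Completions::CancelManager implementation.

# Minimal sketch of the centralized-cancellation pattern exercised by the
# spec below. SketchCancelManager and add_callback are hypothetical names;
# internals are assumptions, not the real implementation.
class SketchCancelManager
  attr_reader :monitor_thread, :callbacks

  def initialize
    @cancelled = false
    @callbacks = []
    @monitor_thread = nil
  end

  def cancelled?
    @cancelled
  end

  # Register teardown work (e.g. aborting an in-flight HTTP request).
  # If we are already cancelled, fire immediately.
  def add_callback(callback)
    @callbacks << callback
    callback.call if cancelled?
  end

  # Flip the cancelled flag and run every registered teardown callback.
  def cancel!
    @cancelled = true
    @callbacks.each(&:call)
  end

  # Poll the supplied block every `delay` seconds on a background thread;
  # cancel as soon as it returns true, then clear the thread handle.
  def start_monitor(delay:, &block)
    @monitor_thread =
      Thread.new do
        loop do
          break if cancelled?
          if block.call
            cancel!
            break
          end
          sleep(delay)
        end
        @monitor_thread = nil
      end
  end

  def stop_monitor
    @monitor_thread&.kill
    @monitor_thread = nil
  end
end

In the flow the spec exercises, an in-flight completion registers a callback with the manager it was handed, so calling cancel! both marks the manager as cancelled and tears down the pending request, causing llm.generate to return nil.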
107 lines · 2.9 KiB · Ruby
# frozen_string_literal: true

describe DiscourseAi::Completions::CancelManager do
  fab!(:model) { Fabricate(:anthropic_model, name: "test-model") }

  it "can stop monitoring for cancellation cleanly" do
    cancel_manager = DiscourseAi::Completions::CancelManager.new
    cancel_manager.start_monitor(delay: 100) { false }
    expect(cancel_manager.monitor_thread).not_to be_nil

    cancel_manager.stop_monitor

    expect(cancel_manager.cancelled?).to eq(false)
    expect(cancel_manager.monitor_thread).to be_nil
  end

  it "can monitor for cancellation" do
    cancel_manager = DiscourseAi::Completions::CancelManager.new
    results = [true, false, false]

    cancel_manager.start_monitor(delay: 0) { results.pop }

    wait_for { cancel_manager.cancelled? == true }
    wait_for { cancel_manager.monitor_thread.nil? }

    expect(cancel_manager.cancelled?).to eq(true)
    expect(cancel_manager.monitor_thread).to be_nil
  end

  it "should do nothing when cancel manager is already cancelled" do
    cancel_manager = DiscourseAi::Completions::CancelManager.new
    cancel_manager.cancel!

    llm = model.to_llm
    prompt =
      DiscourseAi::Completions::Prompt.new(
        "You are a test bot",
        messages: [{ type: :user, content: "hello" }],
      )

    result = llm.generate(prompt, user: Discourse.system_user, cancel_manager: cancel_manager)
    expect(result).to be_nil
  end

  it "should be able to cancel a completion" do
    # Start an HTTP server that hangs indefinitely
    server = TCPServer.new("127.0.0.1", 0)
    port = server.addr[1]

    begin
      thread =
        Thread.new do
          loop do
            begin
              _client = server.accept
              sleep(30) # Hold the connection longer than the test will run
              break
            rescue StandardError
              # Server closed
              break
            end
          end
        end

      # Create a model that points to our hanging server
      model.update!(url: "http://127.0.0.1:#{port}")

      cancel_manager = DiscourseAi::Completions::CancelManager.new

      completion_thread =
        Thread.new do
          llm = model.to_llm
          prompt =
            DiscourseAi::Completions::Prompt.new(
              "You are a test bot",
              messages: [{ type: :user, content: "hello" }],
            )

          result = llm.generate(prompt, user: Discourse.system_user, cancel_manager: cancel_manager)
          expect(result).to be_nil
          expect(cancel_manager.cancelled).to eq(true)
        end

      wait_for { cancel_manager.callbacks.size == 1 }

      cancel_manager.cancel!
      completion_thread.join(2)

      expect(completion_thread).not_to be_alive
    ensure
      begin
        server.close
      rescue StandardError
        nil
      end
      begin
        thread.kill
      rescue StandardError
        nil
      end
      begin
        completion_thread&.kill
      rescue StandardError
        nil
      end
    end
  end
end