Sam 1320eed9b2
FEATURE: move summary to use llm_model (#699)
This allows summarization to use the new LLM models and migrates it off API-key-based model selection (a rough sketch of the difference follows the commit details below).

Claude 3.5, etc. all work now.

---------

Co-authored-by: Roman Rizzi <rizziromanalejandro@gmail.com>
2024-07-04 10:48:18 +10:00
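For context on the migration described above, here is a minimal sketch of the before/after selection logic. LlmModel is the plugin's persisted model record, but the setting name and helper below are illustrative assumptions, not the plugin's actual code:

# Illustrative sketch only: the setting and helper names here are assumed.
#
# Before: the summarization backend was inferred from whichever provider API
# key happened to be configured in site settings.
# After: a persisted LlmModel record is resolved explicitly, so any configured
# model (Claude 3.5, etc.) can be selected for summaries.
def summarization_llm_model
  llm_model = LlmModel.find_by(id: SiteSetting.ai_summarization_model) # assumed setting name
  raise Discourse::InvalidParameters.new(:ai_summarization_model) if llm_model.blank?

  llm_model
end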

# frozen_string_literal: true

module DiscourseAi
  module Summarization
    class SummaryController < ::ApplicationController
      requires_plugin ::DiscourseAi::PLUGIN_NAME

      def show
        topic = Topic.find(params[:topic_id])
        guardian.ensure_can_see!(topic)
        raise Discourse::NotFound if !guardian.can_see_summary?(topic)

        RateLimiter.new(current_user, "summary", 6, 5.minutes).performed! if current_user

        opts = params.permit(:skip_age_check)

        if params[:stream] && current_user
          # Streaming path: generate the summary in a background job and
          # return immediately.
          Jobs.enqueue(
            :stream_topic_ai_summary,
            topic_id: topic.id,
            user_id: current_user.id,
            opts: opts.as_json,
          )

          render json: success_json
        else
          # Blocking path: hijack the connection so a web worker is not tied
          # up while the summary is generated, then render it when ready.
          hijack do
            summary = DiscourseAi::TopicSummarization.summarize(topic, current_user, opts)
            render_serialized(summary, AiTopicSummarySerializer)
          end
        end
      end
    end
  end
end
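
As a usage note, a hedged request-spec sketch of how the two branches of show might be exercised; the route path, setting names, and fabricators follow common Discourse conventions and are assumptions rather than the plugin's actual specs:

# Illustrative request spec; route path and setting names are assumptions.
RSpec.describe DiscourseAi::Summarization::SummaryController do
  fab!(:user) { Fabricate(:user) }
  fab!(:topic) { Fabricate(:topic) }

  before do
    SiteSetting.discourse_ai_enabled = true # assumed plugin-enable setting
    SiteSetting.ai_summarization_enabled = true # assumed feature setting
    sign_in(user)
  end

  it "enqueues the streaming job and returns immediately when stream=true" do
    expect(Jobs).to receive(:enqueue).with(
      :stream_topic_ai_summary,
      hash_including(topic_id: topic.id, user_id: user.id),
    )

    get "/discourse-ai/summarization/t/#{topic.id}.json", params: { stream: true } # assumed path

    expect(response.status).to eq(200)
  end
end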