# frozen_string_literal: true

module DiscourseAi
  module Admin
    class AiPersonasController < ::Admin::AdminController
      requires_plugin ::DiscourseAi::PLUGIN_NAME

      before_action :find_ai_persona, only: %i[edit update destroy create_user]

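      # Lists every persona (via the localized serializer) together with the
      # available tools and configured LLMs, so a single response carries
      # everything the admin UI presumably needs to build its selectors.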
      def index
        ai_personas =
          AiPersona.ordered.map do |persona|
            # we use a special serializer here because names and descriptions are
            # localized for system personas
            LocalizedAiPersonaSerializer.new(persona, root: false)
          end

        tools =
          DiscourseAi::AiBot::Personas::Persona.all_available_tools.map do |tool|
            AiToolSerializer.new(tool, root: false)
          end

        AiTool
          .where(enabled: true)
          .each do |tool|
            tools << {
              id: "custom-#{tool.id}",
              name: I18n.t("discourse_ai.tools.custom_name", name: tool.name.capitalize),
            }
          end

        llms =
          DiscourseAi::Configuration::LlmEnumerator.values.map do |hash|
            { id: hash[:value], name: hash[:name] }
          end

        render json: { ai_personas: ai_personas, meta: { tools: tools, llms: llms } }
      end

      def new
      end

      def edit
        render json: LocalizedAiPersonaSerializer.new(@ai_persona)
      end

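      # Builds a persona from the permitted params (minus :rag_uploads, which
      # are linked separately through RagDocumentFragment) and returns the
      # localized representation on success.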
      def create
        ai_persona = AiPersona.new(ai_persona_params.except(:rag_uploads))
        if ai_persona.save
          RagDocumentFragment.link_target_and_uploads(ai_persona, attached_upload_ids)

          render json: {
                   ai_persona: LocalizedAiPersonaSerializer.new(ai_persona, root: false),
                 },
                 status: :created
        else
          render_json_error ai_persona
        end
      end

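      # Provisions the dedicated user account behind this persona via
      # AiPersona#create_user! and returns it as a basic user payload.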
      def create_user
        user = @ai_persona.create_user!
        render json: BasicUserSerializer.new(user, root: "user")
      end

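      # Mirrors #create: applies the permitted attributes and re-links any RAG
      # uploads before rendering the updated persona.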
      def update
        if @ai_persona.update(ai_persona_params.except(:rag_uploads))
          RagDocumentFragment.update_target_uploads(@ai_persona, attached_upload_ids)

          render json: LocalizedAiPersonaSerializer.new(@ai_persona, root: false)
        else
          render_json_error @ai_persona
        end
      end

      def destroy
        if @ai_persona.destroy
          head :no_content
        else
          render_json_error @ai_persona
        end
      end

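      # Streams a persona reply over a hijacked socket instead of a buffered
      # Rails response. Requires a persona (by :persona_name or :persona_id)
      # that is enabled, has a default LLM and a backing user, plus a :query
      # and either a :username or a :user_unique_id (the latter stages a user
      # on demand). An optional :topic_id continues an existing topic, provided
      # the resolved user is allowed on it.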
      def stream_reply
        persona =
          AiPersona.find_by(name: params[:persona_name]) ||
            AiPersona.find_by(id: params[:persona_id])
        return render_json_error(I18n.t("discourse_ai.errors.persona_not_found")) if persona.nil?

        return render_json_error(I18n.t("discourse_ai.errors.persona_disabled")) if !persona.enabled

        if persona.default_llm.blank?
          return render_json_error(I18n.t("discourse_ai.errors.no_default_llm"))
        end

        if params[:query].blank?
          return render_json_error(I18n.t("discourse_ai.errors.no_query_specified"))
        end

        if !persona.user_id
          return render_json_error(I18n.t("discourse_ai.errors.no_user_for_persona"))
        end

        if !params[:username] && !params[:user_unique_id]
          return render_json_error(I18n.t("discourse_ai.errors.no_user_specified"))
        end

        user = nil

        if params[:username]
          user = User.find_by_username(params[:username])
          return render_json_error(I18n.t("discourse_ai.errors.user_not_found")) if user.nil?
        elsif params[:user_unique_id]
          user = stage_user
        end

        raise Discourse::NotFound if user.nil?

        topic_id = params[:topic_id].to_i
        topic = nil

        if topic_id > 0
          topic = Topic.find(topic_id)

          if topic.topic_allowed_users.where(user_id: user.id).empty?
            return render_json_error(I18n.t("discourse_ai.errors.user_not_allowed"))
          end
        end

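        # Full rack hijack: take over the raw socket so the reply can be
        # written incrementally; Rails no longer manages the response from
        # this point on.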
        hijack = request.env["rack.hijack"]
        io = hijack.call

        DiscourseAi::AiBot::ResponseHttpStreamer.queue_streamed_reply(
          io: io,
          persona: persona,
          user: user,
          topic: topic,
          query: params[:query].to_s,
          custom_instructions: params[:custom_instructions].to_s,
          current_user: current_user,
        )
      end

      private

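      # Custom field linking a staged user to the :user_unique_id it was
      # created for (see #stage_user).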
      AI_STREAM_CONVERSATION_UNIQUE_ID = "ai-stream-conversation-unique-id"

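      # Returns the staged user previously created for this :user_unique_id,
      # or stages a new one with a suggested username, a throwaway email and
      # the unique id recorded in a custom field.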
      def stage_user
        unique_id = params[:user_unique_id].to_s
        field = UserCustomField.find_by(name: AI_STREAM_CONVERSATION_UNIQUE_ID, value: unique_id)

        if field
          field.user
        else
          preferred_username = params[:preferred_username]
          username = UserNameSuggester.suggest(preferred_username || unique_id)

          user =
            User.new(
              username: username,
              email: "#{SecureRandom.hex}@invalid.com",
              staged: true,
              active: false,
            )
          user.custom_fields[AI_STREAM_CONVERSATION_UNIQUE_ID] = unique_id
          user.save!
          user
        end
      end

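      # before_action helper that loads the persona for member actions.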
      def find_ai_persona
        @ai_persona = AiPersona.find(params[:id])
      end

      def attached_upload_ids
        ai_persona_params[:rag_uploads].to_a.map { |h| h[:id] }
      end

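      # Strong parameters for #create and #update. Scalar attributes are
      # permitted explicitly; :tools is handled separately below because each
      # entry is a [name, options, force_tool] tuple rather than a flat scalar.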
      def ai_persona_params
        permitted =
          params.require(:ai_persona).permit(
            :name,
            :description,
            :enabled,
            :system_prompt,
            :priority,
            :top_p,
            :temperature,
            :default_llm,
            :user_id,
            :max_context_posts,
            :vision_enabled,
            :vision_max_pixels,
            :rag_chunk_tokens,
            :rag_chunk_overlap_tokens,
            :rag_conversation_chunks,
            :question_consolidator_llm,
            :allow_chat_channel_mentions,
            :allow_chat_direct_messages,
            :allow_topic_mentions,
            :allow_personal_messages,
            :tool_details,
            :forced_tool_count,
            :force_default_llm,
            allowed_group_ids: [],
            rag_uploads: [:id],
          )

        if tools = params.dig(:ai_persona, :tools)
          permitted[:tools] = permit_tools(tools)
        end

        permitted
      end

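      # Tools arrive as [name, options, force_tool] triples. Returns [] for a
      # non-array payload, aborts (returning nil) as soon as a tool name is not
      # a String, permits any nested options and coerces force_tool to a boolean.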
      def permit_tools(tools)
        return [] if !tools.is_a?(Array)

        tools.filter_map do |tool, options, force_tool|
          break nil if !tool.is_a?(String)
          options&.permit! if options && options.is_a?(ActionController::Parameters)

          # this is simpler from a storage perspective: one way to store tools
          [tool, options, !!force_tool]
        end
      end
    end
  end
end