# frozen_string_literal: true

module DiscourseAi
  module Admin
    class AiLlmsController < ::Admin::AdminController
      requires_plugin ::DiscourseAi::PLUGIN_NAME
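
      # Lists every configured LLM (ordered by display name) plus the metadata
      # the admin UI needs: per-provider params, presets, providers, and tokenizers.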
      def index
        llms = LlmModel.all.order(:display_name)

        render json: {
          ai_llms:
            ActiveModel::ArraySerializer.new(
              llms,
              each_serializer: LlmModelSerializer,
              root: false,
            ).as_json,
          meta: {
            provider_params: LlmModel.provider_params,
            presets: DiscourseAi::Completions::Llm.presets,
            providers: DiscourseAi::Completions::Llm.provider_names,
            tokenizers:
              DiscourseAi::Completions::Llm.tokenizer_names.map { |tn|
                { id: tn, name: tn.split("::").last }
              },
          },
        }
      end
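
      # Renders a single LLM model.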
      def show
        llm_model = LlmModel.find(params[:id])
        render json: LlmModelSerializer.new(llm_model)
      end
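
      # Builds a new LLM from the permitted params and, on success, syncs its
      # companion (chat bot) user.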
      def create
        llm_model = LlmModel.new(ai_llm_params)

        if llm_model.save
          llm_model.toggle_companion_user
          render json: { ai_persona: LlmModelSerializer.new(llm_model) }, status: :created
        else
          render_json_error llm_model
        end
      end
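
      # Applies the permitted params to an existing model and re-syncs its
      # companion user.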
      def update
        llm_model = LlmModel.find(params[:id])

        if llm_model.update(ai_llm_params(updating: llm_model))
          llm_model.toggle_companion_user
          render json: LlmModelSerializer.new(llm_model)
        else
          render_json_error llm_model
        end
      end
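
      # Refuses deletion with a 409 while any module still uses the model;
      # otherwise cleans up its companion user and deletes the record.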
      def destroy
        llm_model = LlmModel.find(params[:id])

        in_use_by = DiscourseAi::Configuration::LlmValidator.new.modules_using(llm_model)

        if !in_use_by.empty?
          return(
            render_json_error(
              I18n.t(
                "discourse_ai.llm.delete_failed",
                settings: in_use_by.join(", "),
                count: in_use_by.length,
              ),
              status: 409,
            )
          )
        end

        # Clean up companion users
        llm_model.enabled_chat_bot = false
        llm_model.toggle_companion_user

        if llm_model.destroy
          head :no_content
        else
          render_json_error llm_model
        end
      end
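
      # Rate-limited connectivity check: runs the validator against an unsaved
      # model built from the submitted params and reports success or the
      # completion error.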
      def test
        RateLimiter.new(current_user, "llm_test_#{current_user.id}", 3, 1.minute).performed!

        llm_model = LlmModel.new(ai_llm_params)

        DiscourseAi::Configuration::LlmValidator.new.run_test(llm_model)

        render json: { success: true }
      rescue DiscourseAi::Completions::Endpoints::Base::CompletionFailed => e
        render json: { success: false, error: e.message }
      end

      private
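
      # Strong parameters for create/update/test. The endpoint URL is not
      # accepted for the Bedrock provider, and provider-specific params are
      # only kept when the provider declares matching fields.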
      def ai_llm_params(updating: nil)
        return {} if params[:ai_llm].blank?

        permitted =
          params.require(:ai_llm).permit(
            :display_name,
            :name,
            :provider,
            :tokenizer,
            :max_prompt_tokens,
            :api_key,
            :enabled_chat_bot,
            :vision_enabled,
          )

        provider = updating ? updating.provider : permitted[:provider]
        permit_url = provider != LlmModel::BEDROCK_PROVIDER_NAME

        new_url = params.dig(:ai_llm, :url)
        permitted[:url] = new_url if permit_url && new_url

        extra_field_names = LlmModel.provider_params.dig(provider&.to_sym, :fields).to_a
        received_prov_params = params.dig(:ai_llm, :provider_params)

        if !extra_field_names.empty? && received_prov_params.present?
          permitted[:provider_params] = received_prov_params.slice(*extra_field_names).permit!
        end

        permitted
      end
    end
  end
end