FIX: LlmModel validations. (#742)

- Validate fields to reduce the chance of a misconfigured model breaking features.
- Fix a bug where the URL could be deleted during an update.
- Display a warning when a model is currently in use.
Roman Rizzi 2024-08-06 14:35:35 -03:00 committed by GitHub
parent d17bbc2dc4
commit 7b4c099673
7 changed files with 76 additions and 17 deletions


@@ -97,6 +97,8 @@ module DiscourseAi
       private

       def ai_llm_params(updating: nil)
+        return {} if params[:ai_llm].blank?
+
         permitted =
           params.require(:ai_llm).permit(
             :display_name,
@@ -112,7 +114,8 @@ module DiscourseAi
         provider = updating ? updating.provider : permitted[:provider]
         permit_url = provider != LlmModel::BEDROCK_PROVIDER_NAME

-        permitted[:url] = params.dig(:ai_llm, :url) if permit_url
+        new_url = params.dig(:ai_llm, :url)
+        permitted[:url] = new_url if permit_url && new_url

         extra_field_names = LlmModel.provider_params.dig(provider&.to_sym, :fields).to_a
         received_prov_params = params.dig(:ai_llm, :provider_params)
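
The URL fix hinges on the added nil check: permitted[:url] is only assigned when the request actually carries a URL, so an update payload that omits the field no longer wipes the stored value. A minimal sketch of the guard in isolation (standalone Ruby, not plugin code; the helper name is made up for illustration):

def merge_url(permitted, incoming_url, permit_url:)
  # Only touch :url when the provider accepts one AND a value was actually sent.
  permitted[:url] = incoming_url if permit_url && incoming_url
  permitted
end

merge_url({ name: "gpt-4o" }, nil, permit_url: true)     # => { name: "gpt-4o" }  (stored URL untouched)
merge_url({}, "https://api.test/v1", permit_url: true)   # => { url: "https://api.test/v1" }
merge_url({}, "https://api.test/v1", permit_url: false)  # => {}  (Bedrock-style providers take no URL)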


@@ -6,6 +6,13 @@ class LlmModel < ActiveRecord::Base
   belongs_to :user

+  validates :display_name, presence: true, length: { maximum: 100 }
+  validates :tokenizer, presence: true, inclusion: DiscourseAi::Completions::Llm.tokenizer_names
+  validates :provider, presence: true, inclusion: DiscourseAi::Completions::Llm.provider_names
+  validates :url, presence: true, unless: -> { provider == BEDROCK_PROVIDER_NAME }
+  validates_presence_of :name, :api_key
+  validates :max_prompt_tokens, numericality: { greater_than: 0 }
+
   def self.provider_params
     {
       aws_bedrock: {
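
A rough sketch of what the new validations reject, using ordinary ActiveModel behaviour (attribute names come from the diff; the console session is illustrative, not plugin output):

model = LlmModel.new(display_name: "My cool LLM", provider: "open_ai", name: "gpt-4o")
model.valid?                      # => false
model.errors[:tokenizer]          # presence + inclusion in Llm.tokenizer_names
model.errors[:url]                # required because the provider is not Bedrock
model.errors[:api_key]            # covered by validates_presence_of :name, :api_key
model.errors[:max_prompt_tokens]  # numericality: must be greater than 0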


@@ -12,13 +12,13 @@ class LlmModelSerializer < ApplicationSerializer
              :api_key,
              :url,
              :enabled_chat_bot,
-             :shadowed_by_srv,
              :provider_params,
-             :vision_enabled
+             :vision_enabled,
+             :used_by

   has_one :user, serializer: BasicUserSerializer, embed: :object

-  def shadowed_by_srv
-    object.url.to_s.starts_with?("srv://")
+  def used_by
+    DiscourseAi::Configuration::LlmValidator.new.modules_using(object)
   end
 end
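
With used_by replacing shadowed_by_srv, the serialized model now tells the admin UI which settings rely on it; the editor component below joins that list into a warning banner. A hypothetical fragment of the payload shape (all values, including the setting names, are invented for illustration; the real list comes from LlmValidator#modules_using):

{
  "display_name" => "My cool LLM",
  "url" => "https://test.test/v1/chat/completions",
  "used_by" => ["ai_helper", "summarization"],
}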


@@ -65,6 +65,17 @@ export default class AiLlmEditorForm extends Component {
     return this.metaProviderParams.url_editable !== false;
   }

+  get modulesUsingModel() {
+    return this.args.model.used_by?.join(", ");
+  }
+
+  get inUseWarning() {
+    return I18n.t("discourse_ai.llms.in_use_warning", {
+      settings: this.modulesUsingModel,
+      count: this.args.model.used_by.length,
+    });
+  }
+
   @computed("args.model.provider")
   get metaProviderParams() {
     return (
@@ -166,6 +177,12 @@ export default class AiLlmEditorForm extends Component {
   }

   <template>
+    {{#if this.modulesUsingModel}}
+      <div class="alert alert-info">
+        {{icon "exclamation-circle"}}
+        {{this.inUseWarning}}
+      </div>
+    {{/if}}
     <form class="form-horizontal ai-llm-editor">
       <div class="control-group">
         <label>{{i18n "discourse_ai.llms.display_name"}}</label>


@@ -236,6 +236,10 @@ en:
           back: "Back"
           confirm_delete: Are you sure you want to delete this model?
           delete: Delete
+          in_use_warning:
+            one: "This model is currently used by the %{settings} setting. If misconfigured, the feature won't work as expected."
+            other: "This model is currently used by the following settings: %{settings}. If misconfigured, features won't work as expected."
+
           preconfigured_llms: "Select your LLM"
           preconfigured:


@@ -43,6 +43,8 @@ Fabricator(:fake_model, from: :llm_model) do
   provider "fake"
   tokenizer "DiscourseAi::Tokenizer::OpenAiTokenizer"
   max_prompt_tokens 32_000
+  api_key "fake"
+  url "https://fake.test/"
 end

 Fabricator(:gemini_model, from: :llm_model) do
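
The fabricator gains api_key and url so :fake_model keeps satisfying the new LlmModel validations. A quick sketch of the effect in specs (Fabricate is the fabrication-gem helper Discourse already uses; the remaining fields come from the parent :llm_model fabricator):

record = Fabricate(:fake_model)
record.api_key  # => "fake"
record.url      # => "https://fake.test/"
record.valid?   # => true under the new validations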


@@ -20,19 +20,19 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
   end

   describe "POST #create" do
-    context "with valid attributes" do
-      let(:valid_attrs) do
-        {
-          display_name: "My cool LLM",
-          name: "gpt-3.5",
-          provider: "open_ai",
-          url: "https://test.test/v1/chat/completions",
-          api_key: "test",
-          tokenizer: "DiscourseAi::Tokenizer::OpenAiTokenizer",
-          max_prompt_tokens: 16_000,
-        }
-      end
+    let(:valid_attrs) do
+      {
+        display_name: "My cool LLM",
+        name: "gpt-3.5",
+        provider: "open_ai",
+        url: "https://test.test/v1/chat/completions",
+        api_key: "test",
+        tokenizer: "DiscourseAi::Tokenizer::OpenAiTokenizer",
+        max_prompt_tokens: 16_000,
+      }
+    end

+    context "with valid attributes" do
       it "creates a new LLM model" do
         post "/admin/plugins/discourse-ai/ai-llms.json", params: { ai_llm: valid_attrs }
@@ -84,6 +84,19 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
         expect(created_model.lookup_custom_param("access_key_id")).to be_nil
       end
     end
+
+    context "with invalid attributes" do
+      it "doesn't create a model" do
+        post "/admin/plugins/discourse-ai/ai-llms.json",
+             params: {
+               ai_llm: valid_attrs.except(:url),
+             }
+
+        created_model = LlmModel.last
+
+        expect(created_model).to be_nil
+      end
+    end
   end

   describe "PUT #update" do
@@ -130,6 +143,19 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
       end
     end

+    context "with invalid update params" do
+      it "doesn't update the model" do
+        put "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}.json",
+            params: {
+              ai_llm: {
+                url: "",
+              },
+            }
+
+        expect(response.status).to eq(422)
+      end
+    end
+
     context "with provider-specific params" do
       it "updates provider-specific config params" do
         provider_params = { organization: "Discourse" }
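
The 422 expected by the invalid-update spec falls out of the model validation: blanking the URL makes the record invalid, so the save is rejected. The same failure is easy to reproduce in a Rails console (a sketch, assuming an existing non-Bedrock model):

llm = LlmModel.last
llm.update(url: "")   # => false; llm.errors[:url] reports the presence failure
llm.update!(url: "")  # raises ActiveRecord::RecordInvalid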