FIX: Correctly save provider-specific params for new models. (#744)

Creating a new model, either manually or from presets, doesn't initialize the `provider_params` object, meaning its custom params won't persist.

Additionally, this change adds some validations for Bedrock params, which are mandatory, and a clear message when a completion fails because we cannot build the URL.
This commit is contained in:
Roman Rizzi 2024-08-07 16:08:56 -03:00 committed by GitHub
parent 3b5245dc54
commit 20efc9285e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 68 additions and 8 deletions

View File

@ -3,6 +3,7 @@ import DiscourseRoute from "discourse/routes/discourse";
export default DiscourseRoute.extend({ export default DiscourseRoute.extend({
async model() { async model() {
const record = this.store.createRecord("ai-llm"); const record = this.store.createRecord("ai-llm");
record.provider_params = {};
return record; return record;
}, },

View File

@ -12,6 +12,7 @@ class LlmModel < ActiveRecord::Base
validates :url, presence: true, unless: -> { provider == BEDROCK_PROVIDER_NAME } validates :url, presence: true, unless: -> { provider == BEDROCK_PROVIDER_NAME }
validates_presence_of :name, :api_key validates_presence_of :name, :api_key
validates :max_prompt_tokens, numericality: { greater_than: 0 } validates :max_prompt_tokens, numericality: { greater_than: 0 }
validate :required_provider_params
def self.provider_params def self.provider_params
{ {
@ -79,6 +80,18 @@ class LlmModel < ActiveRecord::Base
def lookup_custom_param(key) def lookup_custom_param(key)
provider_params&.dig(key) provider_params&.dig(key)
end end
private

# Bedrock models are addressed without a URL, so they must instead carry
# the AWS connection details as provider-specific params. Adds a base
# validation error for each mandatory param that is blank.
def required_provider_params
  return unless provider == BEDROCK_PROVIDER_NAME

  %w[access_key_id region]
    .select { |field| lookup_custom_param(field).blank? }
    .each do |field|
      errors.add(:base, I18n.t("discourse_ai.llm_models.missing_provider_param", param: field))
    end
end
end end
# == Schema Information # == Schema Information

View File

@ -30,14 +30,6 @@ export default class AiLlmEditorForm extends Component {
@tracked testError = null; @tracked testError = null;
@tracked apiKeySecret = true; @tracked apiKeySecret = true;
// Lifecycle hook: seed provider-specific params the first time this form
// receives a model that has none yet (e.g. a freshly created record).
// NOTE(review): removed in this commit — the record is now initialized with
// provider_params at creation time, so this lazy fallback is redundant.
didReceiveAttrs() {
super.didReceiveAttrs(...arguments);
if (!this.args.model.provider_params) {
this.populateProviderParams(this.args.model.provider);
}
}
get selectedProviders() { get selectedProviders() {
const t = (provName) => { const t = (provName) => {
return I18n.t(`discourse_ai.llms.providers.${provName}`); return I18n.t(`discourse_ai.llms.providers.${provName}`);

View File

@ -326,3 +326,7 @@ en:
hint: hint:
one: "Make sure the `%{settings}` setting was configured." one: "Make sure the `%{settings}` setting was configured."
other: "Make sure the settings of the provider you want were configured. Options are: %{settings}" other: "Make sure the settings of the provider you want were configured. Options are: %{settings}"
llm_models:
missing_provider_param: "%{param} can't be blank"
bedrock_invalid_url: "Please complete all the fields to contact this model."

View File

@ -0,0 +1,6 @@
# frozen_string_literal: true

# Gives llm_models.provider_params a {} column default so new records always
# have a hash to store provider-specific settings in.
class AddDefaultToProviderParams < ActiveRecord::Migration[7.1]
  def up
    change_column_default :llm_models, :provider_params, to: {}
  end

  def down
    change_column_default :llm_models, :provider_params, to: nil
  end
end

View File

@ -66,6 +66,10 @@ module DiscourseAi
llm_model.name llm_model.name
end end
if region.blank? || bedrock_model_id.blank?
raise CompletionFailed.new(I18n.t("discourse_ai.llm_models.bedrock_invalid_url"))
end
api_url = api_url =
"https://bedrock-runtime.#{region}.amazonaws.com/model/#{bedrock_model_id}/invoke" "https://bedrock-runtime.#{region}.amazonaws.com/model/#{bedrock_model_id}/invoke"

View File

@ -97,6 +97,46 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
expect(created_model).to be_nil expect(created_model).to be_nil
end end
end end
# Covers the new Bedrock provider-param validations: aws_bedrock models
# must supply both region and access_key_id in provider_params.
context "with provider-specific params" do
it "doesn't create a model if a Bedrock param is missing" do
# access_key_id is intentionally omitted; only region is sent.
post "/admin/plugins/discourse-ai/ai-llms.json",
params: {
ai_llm:
valid_attrs.merge(
provider: "aws_bedrock",
provider_params: {
region: "us-east-1",
},
),
}
created_model = LlmModel.last
# Validation failure: 422 response and no record persisted.
expect(response.status).to eq(422)
expect(created_model).to be_nil
end
it "creates the model if all required provider params are present" do
post "/admin/plugins/discourse-ai/ai-llms.json",
params: {
ai_llm:
valid_attrs.merge(
provider: "aws_bedrock",
provider_params: {
region: "us-east-1",
access_key_id: "test",
},
),
}
created_model = LlmModel.last
# Both mandatory params supplied: model is created (201) and the
# custom params round-trip through lookup_custom_param.
expect(response.status).to eq(201)
expect(created_model.lookup_custom_param("region")).to eq("us-east-1")
expect(created_model.lookup_custom_param("access_key_id")).to eq("test")
end
end
end end
describe "PUT #update" do describe "PUT #update" do