FEATURE: Initial support for seeded LLMs (#756)

Rafael dos Santos Silva · 2024-08-28 15:57:58 -03:00 · committed by GitHub
parent 0687ec75c3
commit a08d168740
9 changed files with 210 additions and 117 deletions


@@ -45,6 +45,10 @@ module DiscourseAi
def update
llm_model = LlmModel.find(params[:id])
if llm_model.seeded?
return render_json_error(I18n.t("discourse_ai.llm.cannot_edit_builtin"), status: 403)
end
if llm_model.update(ai_llm_params(updating: llm_model))
llm_model.toggle_companion_user
render json: LlmModelSerializer.new(llm_model)
@@ -56,6 +60,10 @@ module DiscourseAi
def destroy
llm_model = LlmModel.find(params[:id])
if llm_model.seeded?
return render_json_error(I18n.t("discourse_ai.llm.cannot_delete_builtin"), status: 403)
end
in_use_by = DiscourseAi::Configuration::LlmValidator.new.modules_using(llm_model)
if !in_use_by.empty?
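
With these guards in place, the admin update and destroy endpoints refuse to touch a seeded record before any other processing happens. A rough request-spec style sketch of the observable behaviour (the route path, the `ai_llm` param root, and the `seeded_llm` fixture are assumptions, not shown in this diff):

    # Sketch: editing a seeded model should be rejected with a 403 and the
    # localized "cannot edit built-in" message (route path assumed).
    put "/admin/plugins/discourse-ai/ai-llms/#{seeded_llm.id}.json",
        params: { ai_llm: { display_name: "renamed" } }
    expect(response.status).to eq(403)
    expect(response.parsed_body["errors"]).to include(I18n.t("discourse_ai.llm.cannot_edit_builtin"))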


@@ -89,6 +89,10 @@ class LlmModel < ActiveRecord::Base
provider_params&.dig(key)
end
def seeded?
id < 0
end
private
def required_provider_params
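
The seeded state is derived purely from the primary key: built-in models are expected to carry negative ids, so `seeded?` needs no extra column or flag. A minimal console sketch of the convention (illustrative values only):

    # Records with a negative id are treated as seeded/built-in and become
    # read-only through the controller guards above; regular rows are untouched.
    LlmModel.new(id: -1).seeded?   # => true
    LlmModel.new(id: 1234).seeded? # => false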


@@ -21,4 +21,16 @@ class LlmModelSerializer < ApplicationSerializer
def used_by
DiscourseAi::Configuration::LlmValidator.new.modules_using(object)
end
def api_key
object.seeded? ? "********" : object.api_key
end
def url
object.seeded? ? "********" : object.url
end
def provider
object.seeded? ? "CDCK" : object.provider
end
end
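
Because the serializer overrides kick in for seeded records only, built-in models never leak their real endpoint or key to the admin UI. Roughly, the serialized output for a seeded row looks like this (sketch; `seeded_llm` stands in for such a record, and the exact serializer call may differ):

    # api_key and url are masked, and the provider is reported under the
    # generic "CDCK" label; non-seeded models serialize their real values.
    json = LlmModelSerializer.new(seeded_llm, root: false).as_json
    json[:api_key]  # => "********"
    json[:url]      # => "********"
    json[:provider] # => "CDCK"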


@@ -62,6 +62,10 @@ export default class AiLlmEditorForm extends Component {
return this.args.model.used_by?.join(", ");
}
get seeded() {
return this.args.model.id < 0;
}
get inUseWarning() {
return I18n.t("discourse_ai.llms.in_use_warning", {
settings: this.modulesUsingModel,
@@ -170,13 +174,19 @@ export default class AiLlmEditorForm extends Component {
}
<template>
{{#if this.seeded}}
<div class="alert alert-info">
{{icon "exclamation-circle"}}
{{i18n "discourse_ai.llms.seeded_warning"}}
</div>
{{/if}}
{{#if this.modulesUsingModel}}
<div class="alert alert-info">
{{icon "exclamation-circle"}}
{{this.inUseWarning}}
</div>
{{/if}}
<form class="form-horizontal ai-llm-editor {{if this.seeded 'seeded'}}">
<div class="control-group">
<label>{{i18n "discourse_ai.llms.display_name"}}</label>
<Input
@@ -205,6 +215,7 @@
@class="ai-llm-editor__provider"
/>
</div>
{{#unless this.seeded}}
{{#if this.canEditURL}}
<div class="control-group">
<label>{{I18n.t "discourse_ai.llms.url"}}</label>
@@ -226,7 +237,10 @@
required="true"
{{on "focusout" this.makeApiKeySecret}}
/>
<DButton
@action={{this.toggleApiKeySecret}}
@icon="far-eye-slash"
/>
</div>
</div>
{{#each-in this.metaProviderParams as |field type|}}
@@ -327,6 +341,7 @@
</DButton>
{{/unless}}
</div>
{{/unless}}
<div class="control-group ai-llm-editor-tests">
{{#if this.displayTestResult}}


@@ -236,6 +236,7 @@ en:
back: "Back"
confirm_delete: Are you sure you want to delete this model?
delete: Delete
seeded_warning: "This model is pre-configured on your site and cannot be edited."
in_use_warning:
one: "This model is currently used by the %{settings} setting. If misconfigured, the feature won't work as expected."
other: "This model is currently used by the following settings: %{settings}. If misconfigured, features won't work as expected. "
@@ -268,6 +269,7 @@ en:
google: "Google"
azure: "Azure"
ollama: "Ollama"
CDCK: "CDCK"
provider_fields:
access_key_id: "AWS Bedrock Access key ID"


@@ -312,6 +312,7 @@ en:
disable_module_first: "You have to disable %{setting} first."
set_llm_first: "Set %{setting} first."
model_unreachable: "We couldn't get a response from this model. Check your settings first."
invalid_seeded_model: "You can't use this model with this feature."
endpoints:
not_configured: "%{display_name} (not configured)"
configuration_hint:
@@ -321,6 +322,7 @@ en:
delete_failed:
one: "We couldn't delete this model because %{settings} is using it. Update the setting and try again."
other: "We couldn't delete this model because %{settings} are using it. Update the settings and try again."
cannot_edit_builtin: "You can't edit a built-in model."
embeddings:
configuration:


@@ -289,6 +289,16 @@ discourse_ai:
default: "10" # 10: @trust_level_0
allow_any: false
refresh: true
ai_helper_model_allowed_seeded_models:
default: ""
hidden: true
type: list
list_type: compact
ai_helper_image_caption_model_allowed_seeded_models:
default: ""
hidden: true
type: list
list_type: compact
ai_embeddings_enabled:
default: false
@@ -340,6 +350,11 @@ discourse_ai:
allow_any: false
enum: "DiscourseAi::Configuration::LlmEnumerator"
validator: "DiscourseAi::Configuration::LlmValidator"
ai_embeddings_semantic_search_hyde_model_allowed_seeded_models:
default: ""
hidden: true
type: list
list_type: compact
ai_embeddings_semantic_quick_search_enabled:
default: false
client: true
@@ -366,6 +381,11 @@ discourse_ai:
default: ""
hidden: true
choices: "DiscourseAi::Configuration::LlmEnumerator.old_summarization_options + ['']"
ai_summarization_model_allowed_seeded_models:
default: ""
hidden: true
type: list
list_type: compact
ai_bot_enabled:
default: false
@@ -406,3 +426,8 @@ discourse_ai:
ai_automation_max_triage_per_post_per_minute:
default: 2
hidden: true
ai_automation_allowed_seeded_models:
default: ""
hidden: true
type: list
list_type: compact
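
All of the new *_allowed_seeded_models settings follow the same shape: hidden, pipe-delimited lists of seeded model ids, one per LLM-consuming feature. A hypothetical console example of opting a single feature into a seeded model (ids are illustrative):

    # Allow seeded models -1 and -2 for summarization only; every other
    # feature keeps rejecting seeded models until its own list is populated.
    SiteSetting.ai_summarization_model_allowed_seeded_models = "-1|-2"
    SiteSetting.ai_summarization_model_allowed_seeded_models_map
    # => ["-1", "-2"] (the _map accessor splits the pipe-delimited list)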


@@ -8,7 +8,13 @@ module DiscourseAi
FROM llm_models
SQL
values =
  values
    .filter do |value_h|
      value_h["id"] > 0 ||
        SiteSetting.ai_automation_allowed_seeded_models_map.include?(value_h["id"].to_s)
    end
    .each { |value_h| value_h["id"] = "custom:#{value_h["id"]}" }
values
end
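
For the automation dropdown, the enumerator now drops seeded models unless they appear in ai_automation_allowed_seeded_models; the filter above boils down to something like this (simplified sketch, not the exact query):

    # Keep a model if it is a regular record (positive id) or its id is
    # present in the automation allow-list of seeded model ids.
    allowed = SiteSetting.ai_automation_allowed_seeded_models_map # e.g. ["-1"]
    LlmModel.pluck(:id, :display_name).select do |id, _name|
      id > 0 || allowed.include?(id.to_s)
    end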


@@ -15,6 +15,8 @@ module DiscourseAi
return !@parent_enabled
end
allowed_seeded_model?(val)
run_test(val).tap { |result| @unreachable = result }
rescue StandardError => e
raise e if Rails.env.test?
@@ -45,6 +47,10 @@ module DiscourseAi
)
end
if @invalid_seeded_model
return I18n.t("discourse_ai.llm.configuration.invalid_seeded_model")
end
return unless @unreachable
I18n.t("discourse_ai.llm.configuration.model_unreachable")
@@ -61,6 +67,19 @@ module DiscourseAi
ai_summarization_enabled: :ai_summarization_model,
}
end
def allowed_seeded_model?(val)
id = val.split(":").last
return true if id.to_i > 0
setting = @opts[:name]
allowed_list = SiteSetting.public_send("#{setting}_allowed_seeded_models")
if allowed_list.split("|").exclude?(id)
@invalid_seeded_model = true
raise Discourse::InvalidParameters.new
end
end
end
end
end
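
Taken together, assigning a seeded model to a feature only validates when that feature's allow-list contains the model's id; otherwise allowed_seeded_model? flags it and the save is rejected with the invalid_seeded_model message. A rough console illustration (hypothetical id, exact failure mode depends on how the setting is saved):

    # Without the allow-list entry the validator rejects the assignment...
    SiteSetting.ai_summarization_model = "custom:-5"
    # => fails validation: "You can't use this model with this feature."

    # ...and with the id allow-listed the same assignment is accepted.
    SiteSetting.ai_summarization_model_allowed_seeded_models = "-5"
    SiteSetting.ai_summarization_model = "custom:-5"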