FEATURE: allow seeing configured LLM on feature page (#1460)
This is an interim fix so we can at least tell which LLM each feature is using. It also adds some test coverage to the feature page.
This commit is contained in: parent 1f851bb2e1, commit 471f96f972
@ -38,6 +38,10 @@ module DiscourseAi
          {
            name: feature.name,
            persona: serialize_persona(persona_id_obj_hash[feature.persona_id]),
+           llm_model: {
+             id: feature.llm_model&.id,
+             name: feature.llm_model&.name,
+           },
            enabled: feature.enabled?,
          }
        end
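With this serializer change, each entry in the admin features payload now carries the resolved LLM alongside the persona. Roughly, with illustrative values (the persona shape comes from serialize_persona and is assumed here, not shown in the commit):

  {
    name: "topic_summaries",
    persona: { id: 1, name: "Summarizer" },            # assumed shape from serialize_persona
    llm_model: { id: 42, name: "Claude 3.5 Sonnet" },  # id/name are nil when no LLM resolves
    enabled: true,
  }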
@ -59,6 +59,19 @@ const AiFeaturesList = <template>
              {{i18n "discourse_ai.features.no_persona"}}
            {{/if}}
          </div>
+         <div class="ai-feature-card__llm">
+           <span>{{i18n "discourse_ai.features.llm"}}</span>
+           {{#if feature.llm_model.name}}
+             <DButton
+               class="btn-flat btn-small ai-feature-card__llm-button"
+               @translatedLabel={{feature.llm_model.name}}
+               @route="adminPlugins.show.discourse-ai-llms.edit"
+               @routeModels={{feature.llm_model.id}}
+             />
+           {{else}}
+             {{i18n "discourse_ai.features.no_llm"}}
+           {{/if}}
+         </div>
          {{#if feature.persona}}
            <div class="ai-feature-card__groups">
              <span>{{i18n "discourse_ai.features.groups"}}</span>
@ -24,6 +24,7 @@
    padding: 0.5rem;
    display: block;

+   &__llm,
    &__persona,
    &__groups {
      font-size: var(--font-down-1-rem);
@ -37,12 +38,18 @@
      padding-left: 0;
    }

+   &__groups {
+     display: flex;
+     flex-flow: row wrap;
+     gap: 0.25em;
+   }
+
    &__item-groups {
      list-style: none;
      display: flex;
      flex-flow: row wrap;
      gap: 0.25em;
-     margin: 0.5em 0;
+     margin: 0;

      li {
        font-size: var(--font-down-1);
@ -188,6 +188,8 @@ en:
        disabled: "(disabled)"
        persona: "Persona:"
        groups: "Groups:"
+       llm: "LLM:"
+       no_llm: "No LLM selected"
        no_persona: "Not set"
        no_groups: "None"
        edit: "Edit"
@ -286,11 +286,15 @@ module DiscourseAi
        DiscourseAi::Personas::Bot.as(user, persona: persona_klass.new, model: llm_model)
      end

+     def find_ai_helper_model(helper_mode, persona_klass)
+       self.class.find_ai_helper_model(helper_mode, persona_klass)
+     end
+
      # Priorities are:
      # 1. Persona's default LLM
      # 2. Hidden `ai_helper_model` setting, or `ai_helper_image_caption_model` for image_caption.
      # 3. Newest LLM config
-     def find_ai_helper_model(helper_mode, persona_klass)
+     def self.find_ai_helper_model(helper_mode, persona_klass)
        model_id = persona_klass.default_llm_id

        if !model_id
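The hunk is truncated after the `if !model_id` branch. For orientation, a minimal sketch of the full lookup following the priority comment above; the setting parsing mirrors the split(":") pattern seen in the translation hunk further down, and reading "Newest LLM config" as the most recently created LlmModel record is an assumption, not code from this commit:

  # Sketch only; assumed body for the truncated method.
  def self.find_ai_helper_model(helper_mode, persona_klass)
    model_id = persona_klass.default_llm_id # 1. persona's default LLM

    if !model_id
      # 2. hidden setting (image captions use their own setting); setting names taken from the comment
      setting =
        helper_mode == :image_caption ? SiteSetting.ai_helper_image_caption_model : SiteSetting.ai_helper_model
      model_id = setting.presence&.split(":")&.last
    end

    model_id ? LlmModel.find_by(id: model_id) : LlmModel.last # 3. assumed: newest config = latest record
  end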
@ -175,6 +175,28 @@ module DiscourseAi
        @enabled_by_setting = enabled_by_setting
      end

+     def llm_model
+       persona = AiPersona.find_by(id: persona_id)
+       return if persona.blank?
+
+       persona_klass = persona.class_instance
+
+       llm_model =
+         case module_name
+         when DiscourseAi::Configuration::Module::SUMMARIZATION
+           DiscourseAi::Summarization.find_summarization_model(persona_klass)
+         when DiscourseAi::Configuration::Module::AI_HELPER
+           DiscourseAi::AiHelper::Assistant.find_ai_helper_model(name, persona_klass)
+         when DiscourseAi::Configuration::Module::TRANSLATION
+           DiscourseAi::Translation::BaseTranslator.preferred_llm_model(persona_klass)
+         end
+
+       if llm_model.blank? && persona.default_llm_id
+         llm_model = LlmModel.find_by(id: persona.default_llm_id)
+       end
+
+       llm_model
+     end
+
      attr_reader :name, :persona_setting, :module_id, :module_name

      def enabled?
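A quick way to see the resolution end to end, e.g. from a Rails console; find_features_using is the same lookup exercised by the new spec, and the returned values here are purely illustrative:

  # Illustrative console session, not part of the commit.
  feature = DiscourseAi::Configuration::Feature.find_features_using(persona_id: AiPersona.first.id).first
  feature.name            # => "topic_summaries"
  feature.llm_model&.name # => e.g. "Claude 3.5 Sonnet", or nil when neither the module nor the persona resolves an LLM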
@ -19,7 +19,7 @@ module DiscourseAi
        persona_klass = ai_persona.class_instance
        persona = persona_klass.new

-       model = LlmModel.find_by(id: preferred_llm_model(persona_klass))
+       model = self.class.preferred_llm_model(persona_klass)
        return nil if model.blank?

        bot = DiscourseAi::Personas::Bot.as(translation_user, persona:, model:)
@ -59,8 +59,10 @@ module DiscourseAi
        raise NotImplementedError
      end

-     def preferred_llm_model(persona_klass)
-       persona_klass.default_llm_id || SiteSetting.ai_translation_model&.split(":")&.last
+     def self.preferred_llm_model(persona_klass)
+       id = persona_klass.default_llm_id || SiteSetting.ai_translation_model&.split(":")&.last
+       return nil if id.blank?
+       LlmModel.find_by(id:)
      end
    end
  end
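The practical difference: preferred_llm_model is now a class method that returns an LlmModel record (or nil) rather than a raw id, so callers drop their own LlmModel.find_by wrapper. Side by side (the second form is how the locale detector below now calls it):

  # Before: instance method returning an id, resolved by the caller
  model = LlmModel.find_by(id: preferred_llm_model(persona_klass))

  # After: class method returning the record itself, nil when nothing is configured
  model = DiscourseAi::Translation::BaseTranslator.preferred_llm_model(persona_klass)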
@ -20,7 +20,7 @@ module DiscourseAi
        persona_klass = ai_persona.class_instance
        persona = persona_klass.new

-       llm_model = LlmModel.find_by(id: preferred_llm_model(persona_klass))
+       llm_model = DiscourseAi::Translation::BaseTranslator.preferred_llm_model(persona_klass)
        return nil if llm_model.blank?

        bot =
@ -44,12 +44,6 @@ module DiscourseAi
        end
        structured_output&.read_buffered_property(:locale) || []
      end
-
-     private
-
-     def preferred_llm_model(persona_klass)
-       persona_klass.default_llm_id || SiteSetting.ai_translation_model&.split(":")&.last
-     end
    end
  end
end
spec/configuration/feature_spec.rb (new file, 146 lines)
@ -0,0 +1,146 @@
# frozen_string_literal: true

require "rails_helper"

RSpec.describe DiscourseAi::Configuration::Feature do
  fab!(:llm_model)
  fab!(:ai_persona) { Fabricate(:ai_persona, default_llm_id: llm_model.id) }

  def allow_configuring_setting(&block)
    DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) { block.call }
  end

  describe "#llm_model" do
    context "when persona is not found" do
      it "returns nil when persona_id is invalid" do
        ai_feature =
          described_class.new(
            "topic_summaries",
            "ai_summarization_persona",
            DiscourseAi::Configuration::Module::SUMMARIZATION_ID,
            DiscourseAi::Configuration::Module::SUMMARIZATION,
          )

        SiteSetting.ai_summarization_persona = 999_999
        expect(ai_feature.llm_model).to be_nil
      end
    end

    context "with summarization module" do
      let(:ai_feature) do
        described_class.new(
          "topic_summaries",
          "ai_summarization_persona",
          DiscourseAi::Configuration::Module::SUMMARIZATION_ID,
          DiscourseAi::Configuration::Module::SUMMARIZATION,
        )
      end

      it "returns the configured llm model" do
        SiteSetting.ai_summarization_persona = ai_persona.id
        allow_configuring_setting { SiteSetting.ai_summarization_model = "custom:#{llm_model.id}" }
        expect(ai_feature.llm_model).to eq(llm_model)
      end
    end

    context "with AI helper module" do
      let(:ai_feature) do
        described_class.new(
          "proofread",
          "ai_helper_proofreader_persona",
          DiscourseAi::Configuration::Module::AI_HELPER_ID,
          DiscourseAi::Configuration::Module::AI_HELPER,
        )
      end

      it "returns the persona's default llm when no specific helper model is set" do
        SiteSetting.ai_helper_proofreader_persona = ai_persona.id
        SiteSetting.ai_helper_model = ""

        expect(ai_feature.llm_model).to eq(llm_model)
      end
    end

    context "with translation module" do
      fab!(:translation_model) { Fabricate(:llm_model) }

      let(:ai_feature) do
        described_class.new(
          "locale_detector",
          "ai_translation_locale_detector_persona",
          DiscourseAi::Configuration::Module::TRANSLATION_ID,
          DiscourseAi::Configuration::Module::TRANSLATION,
        )
      end

      it "uses translation model when configured" do
        SiteSetting.ai_translation_locale_detector_persona = ai_persona.id
        ai_persona.update!(default_llm_id: nil)
        allow_configuring_setting do
          SiteSetting.ai_translation_model = "custom:#{translation_model.id}"
        end

        expect(ai_feature.llm_model).to eq(translation_model)
      end
    end
  end

  describe "#enabled?" do
    it "returns true when no enabled_by_setting is specified" do
      ai_feature =
        described_class.new(
          "topic_summaries",
          "ai_summarization_persona",
          DiscourseAi::Configuration::Module::SUMMARIZATION_ID,
          DiscourseAi::Configuration::Module::SUMMARIZATION,
        )

      expect(ai_feature.enabled?).to be true
    end

    it "respects the enabled_by_setting when specified" do
      ai_feature =
        described_class.new(
          "gists",
          "ai_summary_gists_persona",
          DiscourseAi::Configuration::Module::SUMMARIZATION_ID,
          DiscourseAi::Configuration::Module::SUMMARIZATION,
          enabled_by_setting: "ai_summary_gists_enabled",
        )

      SiteSetting.ai_summary_gists_enabled = false
      expect(ai_feature.enabled?).to be false

      SiteSetting.ai_summary_gists_enabled = true
      expect(ai_feature.enabled?).to be true
    end
  end

  describe "#persona_id" do
    it "returns the persona id from site settings" do
      ai_feature =
        described_class.new(
          "topic_summaries",
          "ai_summarization_persona",
          DiscourseAi::Configuration::Module::SUMMARIZATION_ID,
          DiscourseAi::Configuration::Module::SUMMARIZATION,
        )

      SiteSetting.ai_summarization_persona = ai_persona.id
      expect(ai_feature.persona_id).to eq(ai_persona.id)
    end
  end

  describe ".find_features_using" do
    it "returns all features using a specific persona" do
      SiteSetting.ai_summarization_persona = ai_persona.id
      SiteSetting.ai_helper_proofreader_persona = ai_persona.id
      SiteSetting.ai_translation_locale_detector_persona = 999

      features = described_class.find_features_using(persona_id: ai_persona.id)

      expect(features.map(&:name)).to include("topic_summaries", "proofread")
      expect(features.map(&:name)).not_to include("locale_detector")
    end
  end
end
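For orientation, the positional arguments passed to described_class.new throughout this spec line up with the attr_reader :name, :persona_setting, :module_id, :module_name seen in the feature.rb hunk; a minimal sketch of one such construction, with the mapping inferred rather than documented:

  # Argument mapping inferred from the spec and the attr_reader order in feature.rb.
  DiscourseAi::Configuration::Feature.new(
    "gists",                                              # name
    "ai_summary_gists_persona",                           # persona_setting (site setting holding the persona id)
    DiscourseAi::Configuration::Module::SUMMARIZATION_ID, # module_id
    DiscourseAi::Configuration::Module::SUMMARIZATION,    # module_name
    enabled_by_setting: "ai_summary_gists_enabled",       # optional keyword, as used in the #enabled? examples
  )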