FEATURE: improve visibility of AI usage in LLM page (#845)

This changeset:

1. Corrects some issues with "force_default_llm" not applying
2. Expands the LLM list page to show LLM usage
3. Clarifies what enabling a bot on an LLM means (the LLM becomes available in the bot selector)
This commit is contained in:
Sam 2024-10-22 11:16:02 +11:00 committed by GitHub
parent 712a07c39b
commit a1f859a415
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
12 changed files with 197 additions and 78 deletions

View File

@ -10,6 +10,8 @@ export default DiscourseRoute.extend({
record.set("rag_chunk_tokens", 374);
record.set("rag_chunk_overlap_tokens", 10);
record.set("rag_conversation_chunks", 10);
record.set("allow_personal_messages", true);
record.set("tool_details", false);
return record;
},

View File

@ -14,6 +14,9 @@ module DiscourseAi
llms,
each_serializer: LlmModelSerializer,
root: false,
scope: {
llm_usage: DiscourseAi::Configuration::LlmEnumerator.global_usage,
},
).as_json,
meta: {
provider_params: LlmModel.provider_params,

View File

@ -22,7 +22,16 @@ class LlmModelSerializer < ApplicationSerializer
has_one :user, serializer: BasicUserSerializer, embed: :object
def used_by
DiscourseAi::Configuration::LlmValidator.new.modules_using(object)
llm_usage =
(
if (scope && scope[:llm_usage])
scope[:llm_usage]
else
DiscourseAi::Configuration::LlmEnumerator.global_usage
end
)
llm_usage[object.id]
end
def api_key

View File

@ -9,7 +9,6 @@ import { later } from "@ember/runloop";
import { inject as service } from "@ember/service";
import { eq } from "truth-helpers";
import DButton from "discourse/components/d-button";
import DToggleSwitch from "discourse/components/d-toggle-switch";
import Avatar from "discourse/helpers/bound-avatar-template";
import { popupAjaxError } from "discourse/lib/ajax-error";
import icon from "discourse-common/helpers/d-icon";
@ -59,7 +58,20 @@ export default class AiLlmEditorForm extends Component {
}
get modulesUsingModel() {
return this.args.model.used_by?.join(", ");
const usedBy = this.args.model.used_by?.filter((m) => m.type !== "ai_bot");
if (!usedBy || usedBy.length === 0) {
return null;
}
const localized = usedBy.map((m) => {
return I18n.t(`discourse_ai.llms.usage.${m.type}`, {
persona: m.name,
});
});
// TODO: this is not perfectly localized
return localized.join(", ");
}
get seeded() {
@ -157,20 +169,6 @@ export default class AiLlmEditorForm extends Component {
});
}
@action
async toggleEnabledChatBot() {
this.args.model.set("enabled_chat_bot", !this.args.model.enabled_chat_bot);
if (!this.args.model.isNew) {
try {
await this.args.model.update({
enabled_chat_bot: this.args.model.enabled_chat_bot,
});
} catch (e) {
popupAjaxError(e);
}
}
}
<template>
{{#if this.seeded}}
<div class="alert alert-info">
@ -291,12 +289,12 @@ export default class AiLlmEditorForm extends Component {
@content={{I18n.t "discourse_ai.llms.hints.vision_enabled"}}
/>
</div>
<div class="control-group">
<DToggleSwitch
class="ai-llm-editor__enabled-chat-bot"
@state={{@model.enabled_chat_bot}}
@label="discourse_ai.llms.enabled_chat_bot"
{{on "click" this.toggleEnabledChatBot}}
<div class="control-group ai-llm-editor__enabled-chat-bot">
<Input @type="checkbox" @checked={{@model.enabled_chat_bot}} />
<label>{{I18n.t "discourse_ai.llms.enabled_chat_bot"}}</label>
<DTooltip
@icon="question-circle"
@content={{I18n.t "discourse_ai.llms.hints.enabled_chat_bot"}}
/>
</div>
{{#if @model.user}}

View File

@ -1,12 +1,9 @@
import Component from "@glimmer/component";
import { concat, fn } from "@ember/helper";
import { on } from "@ember/modifier";
import { action } from "@ember/object";
import { LinkTo } from "@ember/routing";
import { inject as service } from "@ember/service";
import DBreadcrumbsItem from "discourse/components/d-breadcrumbs-item";
import DToggleSwitch from "discourse/components/d-toggle-switch";
import { popupAjaxError } from "discourse/lib/ajax-error";
import icon from "discourse-common/helpers/d-icon";
import i18n from "discourse-common/helpers/i18n";
import I18n from "discourse-i18n";
@ -100,18 +97,13 @@ export default class AiLlmsListEditor extends Component {
});
}
@action
async toggleEnabledChatBot(llm) {
const oldValue = llm.enabled_chat_bot;
const newValue = !oldValue;
try {
llm.set("enabled_chat_bot", newValue);
await llm.update({
enabled_chat_bot: newValue,
localizeUsage(usage) {
if (usage.type === "ai_persona") {
return I18n.t("discourse_ai.llms.usage.ai_persona", {
persona: usage.name,
});
} catch (err) {
llm.set("enabled_chat_bot", oldValue);
popupAjaxError(err);
} else {
return I18n.t("discourse_ai.llms.usage." + usage.type);
}
}
@ -138,7 +130,6 @@ export default class AiLlmsListEditor extends Component {
<tr>
<th>{{i18n "discourse_ai.llms.display_name"}}</th>
<th>{{i18n "discourse_ai.llms.provider"}}</th>
<th>{{i18n "discourse_ai.llms.enabled_chat_bot"}}</th>
<th></th>
</tr>
</thead>
@ -150,18 +141,19 @@ export default class AiLlmsListEditor extends Component {
<p>
{{this.modelDescription llm}}
</p>
{{#if llm.used_by}}
<ul class="ai-llm-list-editor__usages">
{{#each llm.used_by as |usage|}}
<li>{{this.localizeUsage usage}}</li>
{{/each}}
</ul>
{{/if}}
</td>
<td>
{{i18n
(concat "discourse_ai.llms.providers." llm.provider)
}}
</td>
<td>
<DToggleSwitch
@state={{llm.enabled_chat_bot}}
{{on "click" (fn this.toggleEnabledChatBot llm)}}
/>
</td>
<td class="column-edit">
<LinkTo
@route="adminPlugins.show.discourse-ai-llms.show"

View File

@ -336,6 +336,7 @@ export default class PersonaEditor extends Component {
disabled={{this.editingModel.system}}
/>
</div>
{{#if this.editingModel.user}}
<div class="control-group">
<label>{{I18n.t "discourse_ai.ai_persona.default_llm"}}</label>
<AiLlmSelector
@ -358,6 +359,7 @@ export default class PersonaEditor extends Component {
{{I18n.t "discourse_ai.ai_persona.force_default_llm"}}</label>
</div>
{{/if}}
{{/if}}
{{#unless @model.isNew}}
<div class="control-group">
<label>{{I18n.t "discourse_ai.ai_persona.user"}}</label>

View File

@ -58,7 +58,13 @@ export default class BotSelector extends Component {
this.setAllowLLMSelector();
let llm = this.preferredLlmStore.getObject("id");
llm = llm || this.llmOptions[0].id;
const llmOption =
this.llmOptions.find((innerLlmOption) => innerLlmOption.id === llm) ||
this.llmOptions[0];
llm = llmOption.id;
if (llm) {
next(() => {
this.currentLlm = llm;
@ -96,6 +102,7 @@ export default class BotSelector extends Component {
this.preferredPersonaStore.setObject({ key: "id", value: newValue });
this.composer.metaData = { ai_persona_id: newValue };
this.setAllowLLMSelector();
this.resetTargetRecipients();
}
setAllowLLMSelector() {
@ -112,11 +119,16 @@ export default class BotSelector extends Component {
set currentLlm(newValue) {
this.llm = newValue;
this.preferredLlmStore.setObject({ key: "id", value: newValue });
this.resetTargetRecipients();
}
resetTargetRecipients() {
if (this.allowLLMSelector) {
const botUsername = this.currentUser.ai_enabled_chat_bots.find(
(bot) => bot.model_name === this.llm
).username;
this.preferredLlmStore.setObject({ key: "id", value: newValue });
if (this.allowLLMSelector) {
this.composer.set("targetRecipients", botUsername);
} else {
const persona = this.currentUser.ai_enabled_personas.find(

View File

@ -51,7 +51,8 @@
align-items: center;
}
&__vision-enabled {
&__vision-enabled,
&__enabled-chat-bot {
display: flex;
align-items: flex-start;
}
@ -150,3 +151,17 @@
letter-spacing: 0.1px;
}
}
.ai-llm-list-editor__usages {
list-style: none;
margin: 0.5em 0 0 0;
display: flex;
li {
font-size: var(--font-down-2);
border-radius: 0.25em;
background: var(--primary-very-low);
border: 1px solid var(--primary-low);
padding: 1px 3px;
margin-right: 0.5em;
}
}

View File

@ -252,7 +252,7 @@ en:
max_prompt_tokens: "Number of tokens for the prompt"
url: "URL of the service hosting the model"
api_key: "API Key of the service hosting the model"
enabled_chat_bot: "Allow AI Bot"
enabled_chat_bot: "Allow AI Bot selector"
vision_enabled: "Vision enabled"
ai_bot_user: "AI Bot User"
save: "Save"
@ -262,9 +262,14 @@ en:
confirm_delete: Are you sure you want to delete this model?
delete: Delete
seeded_warning: "This model is pre-configured on your site and cannot be edited."
usage:
ai_bot: "AI Bot"
ai_helper: "AI Helper"
ai_persona: "Persona (%{persona})"
ai_summarization: "Summarization"
in_use_warning:
one: "This model is currently used by the %{settings} setting. If misconfigured, the feature won't work as expected."
other: "This model is currently used by the following settings: %{settings}. If misconfigured, features won't work as expected. "
one: "This model is currently used by %{settings}. If misconfigured, the feature won't work as expected."
other: "This model is currently used by the following: %{settings}. If misconfigured, features won't work as expected."
model_description:
none: "General settings that work for most language models"
@ -299,7 +304,7 @@ en:
max_prompt_tokens: "Max numbers of tokens for the prompt. As a rule of thumb, this should be 50% of the model's context window."
name: "We include this in the API call to specify which model we'll use."
vision_enabled: "If enabled, the AI will attempt to understand images. It depends on the model being used supporting vision. Supported by latest models from Anthropic, Google, and OpenAI."
enabled_chat_bot: "If enabled, users can select this model when creating PMs with the AI bot."
providers:
aws_bedrock: "AWS Bedrock"
anthropic: "Anthropic"

View File

@ -5,6 +5,42 @@ require "enum_site_setting"
module DiscourseAi
module Configuration
class LlmEnumerator < ::EnumSiteSetting
def self.global_usage
rval = Hash.new { |h, k| h[k] = [] }
if SiteSetting.ai_bot_enabled
LlmModel
.where("enabled_chat_bot = ?", true)
.pluck(:id)
.each { |llm_id| rval[llm_id] << { type: :ai_bot } }
AiPersona
.where("force_default_llm = ?", true)
.pluck(:default_llm, :name, :id)
.each do |llm_name, name, id|
llm_id = llm_name.split(":").last.to_i
rval[llm_id] << { type: :ai_persona, name: name, id: id }
end
end
if SiteSetting.ai_helper_enabled
model_id = SiteSetting.ai_helper_model.split(":").last.to_i
rval[model_id] << { type: :ai_helper }
end
if SiteSetting.ai_summarization_enabled
model_id = SiteSetting.ai_summarization_model.split(":").last.to_i
rval[model_id] << { type: :ai_summarization }
end
if SiteSetting.ai_embeddings_semantic_search_enabled
model_id = SiteSetting.ai_embeddings_semantic_search_hyde_model.split(":").last.to_i
rval[model_id] << { type: :ai_embeddings_semantic_search }
end
rval
end
def self.valid_value?(val)
true
end

View File

@ -9,6 +9,17 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
end
describe "GET #index" do
fab!(:llm_model) { Fabricate(:llm_model, enabled_chat_bot: true) }
fab!(:llm_model2) { Fabricate(:llm_model) }
fab!(:ai_persona) do
Fabricate(
:ai_persona,
name: "Cool persona",
force_default_llm: true,
default_llm: "custom:#{llm_model2.id}",
)
end
it "includes all available providers metadata" do
get "/admin/plugins/discourse-ai/ai-llms.json"
expect(response).to be_successful
@ -17,6 +28,44 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
*DiscourseAi::Completions::Llm.provider_names,
)
end
it "lists enabled features on appropriate LLMs" do
SiteSetting.ai_bot_enabled = true
# setting the setting calls the model
DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do
SiteSetting.ai_helper_model = "custom:#{llm_model.id}"
SiteSetting.ai_helper_enabled = true
end
DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do
SiteSetting.ai_summarization_model = "custom:#{llm_model2.id}"
SiteSetting.ai_summarization_enabled = true
end
DiscourseAi::Completions::Llm.with_prepared_responses(["OK"]) do
SiteSetting.ai_embeddings_semantic_search_hyde_model = "custom:#{llm_model2.id}"
SiteSetting.ai_embeddings_semantic_search_enabled = true
end
get "/admin/plugins/discourse-ai/ai-llms.json"
llms = response.parsed_body["ai_llms"]
model_json = llms.find { |m| m["id"] == llm_model.id }
expect(model_json["used_by"]).to contain_exactly(
{ "type" => "ai_bot" },
{ "type" => "ai_helper" },
)
model2_json = llms.find { |m| m["id"] == llm_model2.id }
expect(model2_json["used_by"]).to contain_exactly(
{ "type" => "ai_persona", "name" => "Cool persona", "id" => ai_persona.id },
{ "type" => "ai_summarization" },
{ "type" => "ai_embeddings_semantic_search" },
)
end
end
describe "POST #create" do

View File

@ -12,11 +12,8 @@ RSpec.describe "Managing LLM configurations", type: :system do
visit "/admin/plugins/discourse-ai/ai-llms"
find("[data-llm-id='anthropic-claude-3-haiku'] button").click()
find("input.ai-llm-editor__api-key").fill_in(with: "abcd")
PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle
find(".ai-llm-editor__enabled-chat-bot input").click
find(".ai-llm-editor__save").click()
expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms")
@ -55,8 +52,7 @@ RSpec.describe "Managing LLM configurations", type: :system do
find(".select-kit-row[data-name=\"Llama3Tokenizer\"]").click
find(".ai-llm-editor__vision-enabled input").click
PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle
find(".ai-llm-editor__enabled-chat-bot input").click
find(".ai-llm-editor__save").click()