UX: move templates to main LLM config tab, restyle (#813)

Restructures the LLM config page so it is much clearer.

Also corrects bugs around adding LLMs and around LLMs not being editable after they are added.
---------

Co-authored-by: Sam Saffron <sam.saffron@gmail.com>
Kris 2024-09-30 03:15:11 -04:00 committed by GitHub
parent 1002dc877d
commit 18ecc843e5
11 changed files with 328 additions and 140 deletions
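
Overview of the new flow (an editorial sketch, not part of the diff): the list page now renders the preset catalog as template cards, and clicking a card's "Set up" button navigates to the new-LLM route with the chosen preset in the llmTemplate query param, which the editor uses to pre-fill itself. A minimal sketch of the resulting URL, assuming the conventional /new path for the "…discourse-ai-llms.new" route shown below (the helper name is hypothetical):

// Sketch: the URL a template card produces when chosen.
function llmTemplateUrl(presetId) {
  // e.g. llmTemplateUrl("anthropic-claude-3-haiku")
  //   => "/admin/plugins/discourse-ai/ai-llms/new?llmTemplate=anthropic-claude-3-haiku"
  return `/admin/plugins/discourse-ai/ai-llms/new?llmTemplate=${encodeURIComponent(presetId)}`;
}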

@@ -1,6 +1,10 @@
 import DiscourseRoute from "discourse/routes/discourse";

 export default DiscourseRoute.extend({
+  queryParams: {
+    llmTemplate: { refreshModel: true },
+  },
+
   async model() {
     const record = this.store.createRecord("ai-llm");
     record.provider_params = {};
@@ -13,5 +17,9 @@ export default DiscourseRoute.extend({
       "allLlms",
       this.modelFor("adminPlugins.show.discourse-ai-llms")
     );
+    controller.set(
+      "llmTemplate",
+      this.paramsFor(this.routeName).llmTemplate || null
+    );
   },
 });

@@ -1 +1,5 @@
-<AiLlmsListEditor @llms={{this.allLlms}} @currentLlm={{this.model}} />
+<AiLlmsListEditor
+  @llms={{this.allLlms}}
+  @currentLlm={{this.model}}
+  @llmTemplate={{this.llmTemplate}}
+/>

@@ -36,7 +36,7 @@ module DiscourseAi
         llm_model = LlmModel.new(ai_llm_params)
         if llm_model.save
           llm_model.toggle_companion_user
-          render json: { ai_persona: LlmModelSerializer.new(llm_model) }, status: :created
+          render json: LlmModelSerializer.new(llm_model), status: :created
         else
           render_json_error llm_model
         end

@@ -1,8 +1,11 @@
 # frozen_string_literal: true

 class LlmModelSerializer < ApplicationSerializer
-  root "llm"
+  # TODO: we probably should rename the table LlmModel to AiLlm,
+  # which would be consistent with AiPersona and AiTool;
+  # "LLM model" is a bit confusing, given that "large language model model"
+  # is an awkward name
+  root "ai_llm"
   attributes :id,
              :display_name,
              :name,
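
The serializer root also explains the controller change above: the old create action hand-wrapped the serializer in an ai_persona key (apparently a leftover from the persona editor), whereas the serializer now names its own root. An illustrative sketch of the JSON the create endpoint returns after this change (field values invented; the key names follow the serializer and the request spec further down):

// Sketch only - values are made up, the shape follows root "ai_llm".
const exampleCreateResponse = {
  ai_llm: {
    id: 42,
    display_name: "Claude 3 Haiku",
    name: "claude-3-haiku",
    provider: "anthropic",
  },
};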

@@ -87,9 +87,7 @@ export default class AiLlmEditorForm extends Component {
     const isNew = this.args.model.isNew;

     try {
-      const result = await this.args.model.save();
-      this.args.model.setProperties(result.responseJson.ai_persona);
+      await this.args.model.save();

       if (isNew) {
         this.args.llms.addObject(this.args.model);
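
This removal pairs with the response-shape change: once the payload is no longer wrapped in ai_persona, the removed setProperties call would have been reading a key that no longer exists, which is a plausible source of the "not editable after creation" bug mentioned in the description. Illustrative sketch only:

// With the new payload there is no "ai_persona" key to read.
const responseJson = { ai_llm: { id: 1, display_name: "Claude 3 Haiku" } };
console.log(responseJson.ai_persona); // => undefined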

@@ -1,49 +1,19 @@
 import Component from "@glimmer/component";
-import { tracked } from "@glimmer/tracking";
 import { action } from "@ember/object";
 import BackButton from "discourse/components/back-button";
-import DButton from "discourse/components/d-button";
-import I18n from "discourse-i18n";
-import ComboBox from "select-kit/components/combo-box";
 import AiLlmEditorForm from "./ai-llm-editor-form";

 export default class AiLlmEditor extends Component {
-  @tracked presetConfigured = false;
-  presetId = "none";
-
-  get showPresets() {
-    return (
-      this.args.model.isNew && !this.presetConfigured && !this.args.model.url
-    );
-  }
-
-  get preConfiguredLlms() {
-    let options = [
-      {
-        id: "none",
-        name: I18n.t(`discourse_ai.llms.preconfigured.none`),
-      },
-    ];
-
-    this.args.llms.resultSetMeta.presets.forEach((llm) => {
-      if (llm.models) {
-        llm.models.forEach((model) => {
-          options.push({
-            id: `${llm.id}-${model.name}`,
-            name: model.display_name,
-          });
-        });
-      }
-    });
-
-    return options;
-  }
+  constructor() {
+    super(...arguments);
+
+    if (this.args.llmTemplate) {
+      this.configurePreset();
+    }
+  }

   @action
   configurePreset() {
-    this.presetConfigured = true;
-
-    let [id, model] = this.presetId.split(/-(.*)/);
+    let [id, model] = this.args.llmTemplate.split(/-(.*)/);
     if (id === "none") {
       return;
     }
@@ -66,25 +36,6 @@ export default class AiLlmEditor extends Component {
         @route="adminPlugins.show.discourse-ai-llms"
         @label="discourse_ai.llms.back"
       />
-      {{#if this.showPresets}}
-        <form class="form-horizontal ai-llm-editor">
-          <div class="control-group">
-            <label>{{I18n.t "discourse_ai.llms.preconfigured_llms"}}</label>
-            <ComboBox
-              @value={{this.presetId}}
-              @content={{this.preConfiguredLlms}}
-              class="ai-llm-editor__presets"
-            />
-          </div>
-          <div class="control-group ai-llm-editor__action_panel">
-            <DButton class="ai-llm-editor__next" @action={{this.configurePreset}}>
-              {{I18n.t "discourse_ai.llms.next.title"}}
-            </DButton>
-          </div>
-        </form>
-      {{else}}
-        <AiLlmEditorForm @model={{@model}} @llms={{@llms}} />
-      {{/if}}
+      <AiLlmEditorForm @model={{@model}} @llms={{@llms}} />
     </template>
 }
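
One detail worth calling out in configurePreset: splitting on a regex with a capturing group keeps the remainder of the id in the result, so the model portion, which usually contains dashes itself, is preserved as one piece. A small, self-contained illustration (not part of the commit):

// "anthropic-claude-3-haiku" splits into the provider prefix and the model name.
const [id, model] = "anthropic-claude-3-haiku".split(/-(.*)/);
console.log(id, model); // => "anthropic" "claude-3-haiku"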

@@ -5,20 +5,100 @@ import { action } from "@ember/object";
 import { LinkTo } from "@ember/routing";
 import { inject as service } from "@ember/service";
 import DBreadcrumbsItem from "discourse/components/d-breadcrumbs-item";
+import DButton from "discourse/components/d-button";
 import DToggleSwitch from "discourse/components/d-toggle-switch";
 import { popupAjaxError } from "discourse/lib/ajax-error";
 import icon from "discourse-common/helpers/d-icon";
 import i18n from "discourse-common/helpers/i18n";
 import I18n from "discourse-i18n";
+import AdminPageSubheader from "admin/components/admin-page-subheader";
 import AiLlmEditor from "./ai-llm-editor";

 export default class AiLlmsListEditor extends Component {
   @service adminPluginNavManager;
+  @service router;

-  get hasLLMElements() {
+  @action
+  modelDescription(llm) {
+    // this is a bit of an odd object, it can be an llm model or a preset model
+    // handle both flavors
+
+    // in the case of model
+    let key = "";
+    if (typeof llm.id === "number") {
+      key = `${llm.provider}-${llm.name}`;
+    } else {
+      // case of preset
+      key = llm.id.replace(/\./g, "-");
+    }
+
+    key = `discourse_ai.llms.model_description.${key}`;
+    if (I18n.lookup(key, { ignoreMissing: true })) {
+      return I18n.t(key);
+    }
+
+    return "";
+  }
+
+  sanitizedTranslationKey(id) {
+    return id.replace(/\./g, "-");
+  }
+
+  get hasLlmElements() {
     return this.args.llms.length !== 0;
   }

+  get preconfiguredTitle() {
+    if (this.hasLlmElements) {
+      return "discourse_ai.llms.preconfigured.title";
+    } else {
+      return "discourse_ai.llms.preconfigured.title_no_llms";
+    }
+  }
+
+  get preConfiguredLlms() {
+    const options = [
+      {
+        id: "none",
+        name: I18n.t("discourse_ai.llms.preconfigured.fake"),
+        provider: "fake",
+      },
+    ];
+
+    const llmsContent = this.args.llms.content.map((llm) => ({
+      provider: llm.provider,
+      name: llm.name,
+    }));
+
+    this.args.llms.resultSetMeta.presets.forEach((llm) => {
+      if (llm.models) {
+        llm.models.forEach((model) => {
+          const id = `${llm.id}-${model.name}`;
+          const isConfigured = llmsContent.some(
+            (content) =>
+              content.provider === llm.provider && content.name === model.name
+          );
+
+          if (!isConfigured) {
+            options.push({
+              id,
+              name: model.display_name,
+              provider: llm.provider,
+            });
+          }
+        });
+      }
+    });
+
+    return options;
+  }
+
+  @action
+  transitionToLlmEditor(llmTemplate) {
+    this.router.transitionTo("adminPlugins.show.discourse-ai-llms.new", {
+      queryParams: { llmTemplate },
+    });
+  }
+
   @action
   async toggleEnabledChatBot(llm) {
     const oldValue = llm.enabled_chat_bot;
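
The modelDescription helper maps either kind of object onto the model_description locale keys added below. In the preset branch, dots in model names are swapped for dashes so they cannot be mistaken for nested I18n scopes. A small sketch with illustrative values (the exact Gemini preset name is assumed, not shown in this diff):

// Preset card ids look like "<preset id>-<model name>".
const presetId = "google-gemini-1.5-flash";
const key = `discourse_ai.llms.model_description.${presetId.replace(/\./g, "-")}`;
console.log(key); // => "discourse_ai.llms.model_description.google-gemini-1-5-flash"
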
@@ -39,60 +119,92 @@ export default class AiLlmsListEditor extends Component {
       @path="/admin/plugins/{{this.adminPluginNavManager.currentPlugin.name}}/ai-llms"
       @label={{i18n "discourse_ai.llms.short_title"}}
     />
-    <section class="ai-llms-list-editor admin-detail pull-left">
+    <section class="ai-llm-list-editor admin-detail">
       {{#if @currentLlm}}
-        <AiLlmEditor @model={{@currentLlm}} @llms={{@llms}} />
+        <AiLlmEditor
+          @model={{@currentLlm}}
+          @llms={{@llms}}
+          @llmTemplate={{@llmTemplate}}
+        />
       {{else}}
-        <div class="ai-llms-list-editor__header">
-          <h3>{{i18n "discourse_ai.llms.short_title"}}</h3>
-          {{#unless @currentLlm.isNew}}
-            <LinkTo
-              @route="adminPlugins.show.discourse-ai-llms.new"
-              class="btn btn-small btn-primary ai-llms-list-editor__new"
-            >
-              {{icon "plus"}}
-              <span>{{I18n.t "discourse_ai.llms.new"}}</span>
-            </LinkTo>
-          {{/unless}}
-        </div>
-        {{#if this.hasLLMElements}}
-          <table class="content-list ai-persona-list-editor">
-            <thead>
-              <tr>
-                <th>{{i18n "discourse_ai.llms.display_name"}}</th>
-                <th>{{i18n "discourse_ai.llms.provider"}}</th>
-                <th>{{i18n "discourse_ai.llms.enabled_chat_bot"}}</th>
-                <th></th>
-              </tr>
-            </thead>
-            <tbody>
-              {{#each @llms as |llm|}}
-                <tr data-persona-id={{llm.id}} class="ai-llm-list__row">
-                  <td><strong>{{llm.display_name}}</strong></td>
-                  <td>{{i18n
-                      (concat "discourse_ai.llms.providers." llm.provider)
-                    }}</td>
-                  <td>
-                    <DToggleSwitch
-                      @state={{llm.enabled_chat_bot}}
-                      {{on "click" (fn this.toggleEnabledChatBot llm)}}
-                    />
-                  </td>
-                  <td>
-                    <LinkTo
-                      @route="adminPlugins.show.discourse-ai-llms.show"
-                      current-when="true"
-                      class="btn btn-text btn-small"
-                      @model={{llm}}
-                    >{{i18n "discourse_ai.llms.edit"}}</LinkTo>
-                  </td>
-                </tr>
-              {{/each}}
-            </tbody>
-          </table>
-        {{/if}}
+        {{#if this.hasLlmElements}}
+          <section class="ai-llms-list-editor__configured">
+            <AdminPageSubheader
+              @titleLabel="discourse_ai.llms.configured.title"
+            />
+            <table>
+              <thead>
+                <tr>
+                  <th>{{i18n "discourse_ai.llms.display_name"}}</th>
+                  <th>{{i18n "discourse_ai.llms.provider"}}</th>
+                  <th>{{i18n "discourse_ai.llms.enabled_chat_bot"}}</th>
+                  <th></th>
+                </tr>
+              </thead>
+              <tbody>
+                {{#each @llms as |llm|}}
+                  <tr data-persona-id={{llm.id}} class="ai-llm-list__row">
+                    <td class="column-name">
+                      <h3>{{llm.display_name}}</h3>
+                      <p>
+                        {{this.modelDescription llm}}
+                      </p>
+                    </td>
+                    <td>
+                      {{i18n
+                        (concat "discourse_ai.llms.providers." llm.provider)
+                      }}
+                    </td>
+                    <td>
+                      <DToggleSwitch
+                        @state={{llm.enabled_chat_bot}}
+                        {{on "click" (fn this.toggleEnabledChatBot llm)}}
+                      />
+                    </td>
+                    <td class="column-edit">
+                      <LinkTo
+                        @route="adminPlugins.show.discourse-ai-llms.show"
+                        class="btn btn-default"
+                        @model={{llm.id}}
+                      >
+                        {{icon "wrench"}}
+                        <div class="d-button-label">
+                          {{i18n "discourse_ai.llms.edit"}}
+                        </div>
+                      </LinkTo>
+                    </td>
+                  </tr>
+                {{/each}}
+              </tbody>
+            </table>
+          </section>
+        {{/if}}
+        <section class="ai-llms-list-editor__templates">
+          <AdminPageSubheader @titleLabel={{this.preconfiguredTitle}} />
+          <div class="ai-llms-list-editor__templates-list">
+            {{#each this.preConfiguredLlms as |llm|}}
+              <div
+                data-llm-id={{llm.id}}
+                class="ai-llms-list-editor__templates-list-item"
+              >
+                <h4>
+                  {{i18n (concat "discourse_ai.llms.providers." llm.provider)}}
+                </h4>
+                <h3>
+                  {{llm.name}}
+                </h3>
+                <p>
+                  {{this.modelDescription llm}}
+                </p>
+                <DButton
+                  @action={{fn this.transitionToLlmEditor llm.id}}
+                  @icon="gear"
+                  @label="discourse_ai.llms.preconfigured.button"
+                />
+              </div>
+            {{/each}}
+          </div>
+        </section>
       {{/if}}
     </section>
   </template>
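
Also worth noting from preConfiguredLlms: a template card is only offered while no configured LLM matches its provider and model name, which is why the subheader switches between "Unconfigured LLM templates" and the empty-state title. A runnable sketch of that filtering with invented data:

// Once Claude 3 Haiku is configured, only the remaining preset models become cards.
const configured = [{ provider: "anthropic", name: "claude-3-haiku" }];
const presets = [
  {
    id: "anthropic",
    provider: "anthropic",
    models: [
      { name: "claude-3-haiku", display_name: "Claude 3 Haiku" },
      { name: "claude-3-5-sonnet", display_name: "Claude 3.5 Sonnet" },
    ],
  },
];

const cards = [];
presets.forEach((preset) => {
  preset.models.forEach((model) => {
    const taken = configured.some(
      (c) => c.provider === preset.provider && c.name === model.name
    );
    if (!taken) {
      cards.push({ id: `${preset.id}-${model.name}`, name: model.display_name });
    }
  });
});

console.log(cards); // => [{ id: "anthropic-claude-3-5-sonnet", name: "Claude 3.5 Sonnet" }]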

@@ -4,10 +4,10 @@
     justify-content: space-between;
     align-items: center;
     margin: 0 0 1em 0;
-
-    h3 {
-      margin: 0;
-    }
+  }
+
+  &__configured + &__templates {
+    margin-top: 3em;
   }
 }
@@ -56,3 +56,104 @@
     align-items: flex-start;
   }
 }
+
+[class*="ai-llms-list-editor"] {
+  .admin-page-subheader {
+    h3 {
+      font-size: var(--font-up-2);
+      margin: 0;
+      font-weight: bold;
+    }
+  }
+
+  h3 {
+    font-weight: normal;
+    margin: 0;
+    line-height: var(--line-height-medium);
+  }
+}
+
+.ai-llms-list-editor__configured {
+  p {
+    margin: 0;
+    color: var(--primary-high);
+    @include breakpoint("mobile-extra-large") {
+      display: none;
+    }
+  }
+
+  table {
+    th {
+      white-space: nowrap;
+    }
+    tr:hover {
+      background: transparent;
+    }
+    td {
+      padding: 1em 0.5em;
+    }
+  }
+
+  .column-name {
+    width: 100%;
+  }
+
+  .column-edit {
+    text-align: right;
+    @include breakpoint("mobile-extra-large") {
+      .d-button-label {
+        display: none;
+      }
+      .d-icon {
+        margin: 0;
+      }
+    }
+  }
+
+  .d-toggle-switch {
+    justify-content: center;
+  }
+}
+
+.ai-llms-list-editor__templates {
+  &-list {
+    display: grid;
+    grid-template-columns: repeat(auto-fill, minmax(16em, 1fr));
+    gap: 1em 2em;
+    margin-top: 1em;
+    border-top: 3px solid var(--primary-low); // matches tbody border
+    padding-top: 1em;
+  }
+
+  &-list-item {
+    display: grid;
+    grid-template-rows: subgrid;
+    grid-row: span 4;
+    gap: 0;
+    margin-bottom: 2em;
+    @include breakpoint("mobile-extra-large", min-width) {
+      margin-bottom: 3em;
+    }
+  }
+
+  p {
+    color: var(--primary-high);
+    margin: 0.25em 0 0.5em;
+    line-height: var(--line-height-large);
+    align-self: start;
+    @include breakpoint("mobile-extra-large", min-width) {
+      max-width: 17em;
+    }
+  }
+
+  button {
+    justify-self: start;
+  }
+
+  h4 {
+    font-size: var(--font-down-1);
+    font-weight: normal;
+    color: var(--primary-high);
+    margin: 0;
+    letter-spacing: 0.1px;
+  }
+}

@@ -226,9 +226,9 @@ en:
        short_title: "LLMs"
        no_llms: "No LLMs yet"
        new: "New Model"
-       display_name: "Name to display"
-       name: "Model name"
-       provider: "Service hosting the model"
+       display_name: "Name"
+       name: "Model id"
+       provider: "Provider"
        tokenizer: "Tokenizer"
        max_prompt_tokens: "Number of tokens for the prompt"
        url: "URL of the service hosting the model"
@@ -247,9 +247,26 @@ en:
          one: "This model is currently used by the %{settings} setting. If misconfigured, the feature won't work as expected."
          other: "This model is currently used by the following settings: %{settings}. If misconfigured, features won't work as expected. "
+       model_description:
+         none: "General settings that work for most language models"
+         anthropic-claude-3-5-sonnet: "Anthropic's most intelligent model"
+         anthropic-claude-3-opus: "Excels at writing and complex tasks"
+         anthropic-claude-3-sonnet: "Balance of speed and intelligence"
+         anthropic-claude-3-haiku: "Fast and cost-effective"
+         google-gemini-1-5-pro: "Mid-sized multimodal model capable of a wide range of tasks"
+         google-gemini-1-5-flash: "Lightweight, fast, and cost-efficient with multimodal reasoning"
+         open_ai-gpt-4-turbo: "Previous generation high-intelligence model"
+         open_ai-gpt-4o: "High intelligence model for complex, multi-step tasks"
+         open_ai-gpt-4o-mini: "Affordable and fast small model for lightweight tasks"
+       configured:
+         title: "Configured LLMs"
        preconfigured_llms: "Select your LLM"
        preconfigured:
-         none: "Configure manually..."
+         title_no_llms: "Select a template to get started"
+         title: "Unconfigured LLM templates"
+         fake: "Manual configuration"
+         button: "Set up"
        next:
          title: "Next"
@@ -276,6 +293,7 @@ en:
          ollama: "Ollama"
          CDCK: "CDCK"
          samba_nova: "SambaNova"
+         fake: "Custom"
        provider_fields:
          access_key_id: "AWS Bedrock Access key ID"

@@ -35,14 +35,18 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
      context "with valid attributes" do
        it "creates a new LLM model" do
          post "/admin/plugins/discourse-ai/ai-llms.json", params: { ai_llm: valid_attrs }

-         created_model = LlmModel.last
+         response_body = response.parsed_body
+         created_model = response_body["ai_llm"]

-         expect(created_model.display_name).to eq(valid_attrs[:display_name])
-         expect(created_model.name).to eq(valid_attrs[:name])
-         expect(created_model.provider).to eq(valid_attrs[:provider])
-         expect(created_model.tokenizer).to eq(valid_attrs[:tokenizer])
-         expect(created_model.max_prompt_tokens).to eq(valid_attrs[:max_prompt_tokens])
+         expect(created_model["display_name"]).to eq(valid_attrs[:display_name])
+         expect(created_model["name"]).to eq(valid_attrs[:name])
+         expect(created_model["provider"]).to eq(valid_attrs[:provider])
+         expect(created_model["tokenizer"]).to eq(valid_attrs[:tokenizer])
+         expect(created_model["max_prompt_tokens"]).to eq(valid_attrs[:max_prompt_tokens])
+
+         model = LlmModel.find(created_model["id"])
+         expect(model.display_name).to eq(valid_attrs[:display_name])
        end

        it "creates a companion user" do

@@ -8,20 +8,10 @@ RSpec.describe "Managing LLM configurations", type: :system do
    sign_in(admin)
  end

- def select_preset(option)
-   select_kit = PageObjects::Components::SelectKit.new(".ai-llm-editor__presets")
-
-   select_kit.expand
-   select_kit.select_row_by_value("anthropic-claude-3-haiku")
-
-   find(".ai-llm-editor__next").click()
- end
-
  it "correctly sets defaults" do
    visit "/admin/plugins/discourse-ai/ai-llms"

-   find(".ai-llms-list-editor__new").click()
-   select_preset("anthropic-claude-3-haiku")
+   find("[data-llm-id='anthropic-claude-3-haiku'] button").click()

    find("input.ai-llm-editor__api-key").fill_in(with: "abcd")
@@ -50,8 +40,7 @@ RSpec.describe "Managing LLM configurations", type: :system do
  it "manually configures an LLM" do
    visit "/admin/plugins/discourse-ai/ai-llms"

-   find(".ai-llms-list-editor__new").click()
-   select_preset("none")
+   find("[data-llm-id='none'] button").click()

    find("input.ai-llm-editor__display-name").fill_in(with: "Self-hosted LLM")
    find("input.ai-llm-editor__name").fill_in(with: "llava-hf/llava-v1.6-mistral-7b-hf")