diff --git a/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-new.js b/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-new.js
index b4e8a795..0af43de4 100644
--- a/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-new.js
+++ b/admin/assets/javascripts/discourse/routes/admin-plugins-show-discourse-ai-llms-new.js
@@ -1,6 +1,10 @@
import DiscourseRoute from "discourse/routes/discourse";
export default DiscourseRoute.extend({
+ queryParams: {
+ llmTemplate: { refreshModel: true },
+ },
+
async model() {
const record = this.store.createRecord("ai-llm");
record.provider_params = {};
@@ -13,5 +17,9 @@ export default DiscourseRoute.extend({
"allLlms",
this.modelFor("adminPlugins.show.discourse-ai-llms")
);
+ controller.set(
+ "llmTemplate",
+ this.paramsFor(this.routeName).llmTemplate || null
+ );
},
});
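
Side note for reviewers: `refreshModel: true` only takes effect when the route's controller also declares `llmTemplate` as a query param. That controller is not part of this diff, so the snippet below is just a minimal sketch of the expected pairing (the file and class name are assumptions):

```js
// Hypothetical sketch, not part of this patch: the controller half of the
// llmTemplate query param declared on the route above.
import Controller from "@ember/controller";

export default class AdminPluginsShowDiscourseAiLlmsNewController extends Controller {
  queryParams = ["llmTemplate"];
  llmTemplate = null;
}
```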
diff --git a/admin/assets/javascripts/discourse/templates/admin-plugins/show/discourse-ai-llms/new.hbs b/admin/assets/javascripts/discourse/templates/admin-plugins/show/discourse-ai-llms/new.hbs
index 77f3b0f3..cfc02f05 100644
--- a/admin/assets/javascripts/discourse/templates/admin-plugins/show/discourse-ai-llms/new.hbs
+++ b/admin/assets/javascripts/discourse/templates/admin-plugins/show/discourse-ai-llms/new.hbs
@@ -1 +1,5 @@
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/app/controllers/discourse_ai/admin/ai_llms_controller.rb b/app/controllers/discourse_ai/admin/ai_llms_controller.rb
index 243b0a2b..6505c174 100644
--- a/app/controllers/discourse_ai/admin/ai_llms_controller.rb
+++ b/app/controllers/discourse_ai/admin/ai_llms_controller.rb
@@ -36,7 +36,7 @@ module DiscourseAi
llm_model = LlmModel.new(ai_llm_params)
if llm_model.save
llm_model.toggle_companion_user
- render json: { ai_persona: LlmModelSerializer.new(llm_model) }, status: :created
+ render json: LlmModelSerializer.new(llm_model), status: :created
else
render_json_error llm_model
end
diff --git a/app/serializers/llm_model_serializer.rb b/app/serializers/llm_model_serializer.rb
index d515ca62..f4bce72e 100644
--- a/app/serializers/llm_model_serializer.rb
+++ b/app/serializers/llm_model_serializer.rb
@@ -1,8 +1,11 @@
# frozen_string_literal: true
class LlmModelSerializer < ApplicationSerializer
- root "llm"
-
+ # TODO: we probably should rename the LlmModel table to AiLlm
+ # to be consistent with AiPersona and AiTool.
+ # "LLM model" is a confusing name, since it expands to
+ # "large language model model".
+ root "ai_llm"
attributes :id,
:display_name,
:name,
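
Taken together with the controller change above, the create endpoint now returns the record under a top-level `ai_llm` key instead of the old `ai_persona` wrapper. A rough client-side illustration of the new response shape (the helper itself is hypothetical; the endpoint path and field names follow the request spec further down):

```js
// Hypothetical helper, not part of this patch: reads the record out of the
// new "ai_llm" root produced by `root "ai_llm"` in LlmModelSerializer.
async function createAiLlm(attrs) {
  const response = await fetch("/admin/plugins/discourse-ai/ai-llms.json", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ ai_llm: attrs }),
  });
  const json = await response.json();
  // Previously the record came back under "ai_persona"; it now sits under "ai_llm".
  return json.ai_llm; // => { id, display_name, name, provider, ... }
}
```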
diff --git a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
index 1281481c..df17e832 100644
--- a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
+++ b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
@@ -87,9 +87,7 @@ export default class AiLlmEditorForm extends Component {
const isNew = this.args.model.isNew;
try {
- const result = await this.args.model.save();
-
- this.args.model.setProperties(result.responseJson.ai_persona);
+ await this.args.model.save();
if (isNew) {
this.args.llms.addObject(this.args.model);
diff --git a/assets/javascripts/discourse/components/ai-llm-editor.gjs b/assets/javascripts/discourse/components/ai-llm-editor.gjs
index af9b4ff6..29beb1fc 100644
--- a/assets/javascripts/discourse/components/ai-llm-editor.gjs
+++ b/assets/javascripts/discourse/components/ai-llm-editor.gjs
@@ -1,49 +1,19 @@
import Component from "@glimmer/component";
-import { tracked } from "@glimmer/tracking";
import { action } from "@ember/object";
import BackButton from "discourse/components/back-button";
-import DButton from "discourse/components/d-button";
-import I18n from "discourse-i18n";
-import ComboBox from "select-kit/components/combo-box";
import AiLlmEditorForm from "./ai-llm-editor-form";
export default class AiLlmEditor extends Component {
- @tracked presetConfigured = false;
- presetId = "none";
-
- get showPresets() {
- return (
- this.args.model.isNew && !this.presetConfigured && !this.args.model.url
- );
- }
-
- get preConfiguredLlms() {
- let options = [
- {
- id: "none",
- name: I18n.t(`discourse_ai.llms.preconfigured.none`),
- },
- ];
-
- this.args.llms.resultSetMeta.presets.forEach((llm) => {
- if (llm.models) {
- llm.models.forEach((model) => {
- options.push({
- id: `${llm.id}-${model.name}`,
- name: model.display_name,
- });
- });
- }
- });
-
- return options;
+ constructor() {
+ super(...arguments);
+ if (this.args.llmTemplate) {
+ this.configurePreset();
+ }
}
@action
configurePreset() {
- this.presetConfigured = true;
-
- let [id, model] = this.presetId.split(/-(.*)/);
+ let [id, model] = this.args.llmTemplate.split(/-(.*)/);
if (id === "none") {
return;
}
@@ -66,25 +36,6 @@ export default class AiLlmEditor extends Component {
@route="adminPlugins.show.discourse-ai-llms"
@label="discourse_ai.llms.back"
/>
- {{#if this.showPresets}}
-
- {{else}}
-
- {{/if}}
+
}
diff --git a/assets/javascripts/discourse/components/ai-llms-list-editor.gjs b/assets/javascripts/discourse/components/ai-llms-list-editor.gjs
index 1a2e32a3..a00ed6fe 100644
--- a/assets/javascripts/discourse/components/ai-llms-list-editor.gjs
+++ b/assets/javascripts/discourse/components/ai-llms-list-editor.gjs
@@ -5,20 +5,100 @@ import { action } from "@ember/object";
import { LinkTo } from "@ember/routing";
import { inject as service } from "@ember/service";
import DBreadcrumbsItem from "discourse/components/d-breadcrumbs-item";
+import DButton from "discourse/components/d-button";
import DToggleSwitch from "discourse/components/d-toggle-switch";
import { popupAjaxError } from "discourse/lib/ajax-error";
import icon from "discourse-common/helpers/d-icon";
import i18n from "discourse-common/helpers/i18n";
import I18n from "discourse-i18n";
+import AdminPageSubheader from "admin/components/admin-page-subheader";
import AiLlmEditor from "./ai-llm-editor";
export default class AiLlmsListEditor extends Component {
@service adminPluginNavManager;
+ @service router;
- get hasLLMElements() {
+ @action
+ modelDescription(llm) {
+ // `llm` is a bit of an odd argument: it can be an LlmModel record or a
+ // preset model, so handle both flavors.
+
+ // in the case of a saved model record
+ let key = "";
+ if (typeof llm.id === "number") {
+ key = `${llm.provider}-${llm.name}`;
+ } else {
+ // case of preset
+ key = llm.id.replace(/\./g, "-");
+ }
+
+ key = `discourse_ai.llms.model_description.${key}`;
+ if (I18n.lookup(key, { ignoreMissing: true })) {
+ return I18n.t(key);
+ }
+ return "";
+ }
+
+ sanitizedTranslationKey(id) {
+ return id.replace(/\./g, "-");
+ }
+
+ get hasLlmElements() {
return this.args.llms.length !== 0;
}
+ get preconfiguredTitle() {
+ if (this.hasLlmElements) {
+ return "discourse_ai.llms.preconfigured.title";
+ } else {
+ return "discourse_ai.llms.preconfigured.title_no_llms";
+ }
+ }
+
+ get preConfiguredLlms() {
+ const options = [
+ {
+ id: "none",
+ name: I18n.t("discourse_ai.llms.preconfigured.fake"),
+ provider: "fake",
+ },
+ ];
+
+ const llmsContent = this.args.llms.content.map((llm) => ({
+ provider: llm.provider,
+ name: llm.name,
+ }));
+
+ this.args.llms.resultSetMeta.presets.forEach((llm) => {
+ if (llm.models) {
+ llm.models.forEach((model) => {
+ const id = `${llm.id}-${model.name}`;
+ const isConfigured = llmsContent.some(
+ (content) =>
+ content.provider === llm.provider && content.name === model.name
+ );
+
+ if (!isConfigured) {
+ options.push({
+ id,
+ name: model.display_name,
+ provider: llm.provider,
+ });
+ }
+ });
+ }
+ });
+
+ return options;
+ }
+
+ @action
+ transitionToLlmEditor(llmTemplate) {
+ this.router.transitionTo("adminPlugins.show.discourse-ai-llms.new", {
+ queryParams: { llmTemplate },
+ });
+ }
+
@action
async toggleEnabledChatBot(llm) {
const oldValue = llm.enabled_chat_bot;
@@ -39,60 +119,92 @@ export default class AiLlmsListEditor extends Component {
@path="/admin/plugins/{{this.adminPluginNavManager.currentPlugin.name}}/ai-llms"
@label={{i18n "discourse_ai.llms.short_title"}}
/>
-
-
+
{{#if @currentLlm}}
-
+
{{else}}
-
-
- {{#if this.hasLLMElements}}
-
-
-
- {{i18n "discourse_ai.llms.display_name"}} |
- {{i18n "discourse_ai.llms.provider"}} |
- {{i18n "discourse_ai.llms.enabled_chat_bot"}} |
- |
-
-
-
- {{#each @llms as |llm|}}
-
- {{llm.display_name}} |
- {{i18n
- (concat "discourse_ai.llms.providers." llm.provider)
- }} |
-
-
- |
-
- {{i18n "discourse_ai.llms.edit"}}
- |
+ {{#if this.hasLlmElements}}
+
+
{{/if}}
+
+
+
+ {{#each this.preConfiguredLlms as |llm|}}
+
+
+ {{i18n (concat "discourse_ai.llms.providers." llm.provider)}}
+
+
+ {{llm.name}}
+
+
+ {{this.modelDescription llm}}
+
+
+
+ {{/each}}
+
+
{{/if}}
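
The template block of this component did not survive in the excerpt above; the only hints at its markup are the SCSS class names below and the `[data-llm-id='...'] button` selectors in the updated system spec. A hypothetical sketch of one preset card inside the `{{#each this.preConfiguredLlms as |llm|}}` loop, consistent with those selectors:

```hbs
{{!-- Hypothetical sketch, not part of this patch: the real template lines are
      not shown in this diff. Renders inside the preConfiguredLlms loop. --}}
<div class="ai-llms-list-editor__templates-list-item" data-llm-id={{llm.id}}>
  <h4>{{i18n (concat "discourse_ai.llms.providers." llm.provider)}}</h4>
  <h3>{{llm.name}}</h3>
  <p>{{this.modelDescription llm}}</p>
  <DButton
    @label="discourse_ai.llms.preconfigured.button"
    @action={{fn this.transitionToLlmEditor llm.id}}
  />
</div>
```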
diff --git a/assets/stylesheets/modules/llms/common/ai-llms-editor.scss b/assets/stylesheets/modules/llms/common/ai-llms-editor.scss
index cc43edb0..22faf6b6 100644
--- a/assets/stylesheets/modules/llms/common/ai-llms-editor.scss
+++ b/assets/stylesheets/modules/llms/common/ai-llms-editor.scss
@@ -4,10 +4,10 @@
justify-content: space-between;
align-items: center;
margin: 0 0 1em 0;
+ }
- h3 {
- margin: 0;
- }
+ &__configured + &__templates {
+ margin-top: 3em;
}
}
@@ -56,3 +56,104 @@
align-items: flex-start;
}
}
+
+[class*="ai-llms-list-editor"] {
+ .admin-page-subheader {
+ h3 {
+ font-size: var(--font-up-2);
+ margin: 0;
+ font-weight: bold;
+ }
+ }
+ h3 {
+ font-weight: normal;
+ margin: 0;
+ line-height: var(--line-height-medium);
+ }
+}
+
+.ai-llms-list-editor__configured {
+ p {
+ margin: 0;
+ color: var(--primary-high);
+ @include breakpoint("mobile-extra-large") {
+ display: none;
+ }
+ }
+
+ table {
+ th {
+ white-space: nowrap;
+ }
+ tr:hover {
+ background: transparent;
+ }
+ td {
+ padding: 1em 0.5em;
+ }
+ }
+
+ .column-name {
+ width: 100%;
+ }
+
+ .column-edit {
+ text-align: right;
+
+ @include breakpoint("mobile-extra-large") {
+ .d-button-label {
+ display: none;
+ }
+ .d-icon {
+ margin: 0;
+ }
+ }
+ }
+
+ .d-toggle-switch {
+ justify-content: center;
+ }
+}
+
+.ai-llms-list-editor__templates {
+ &-list {
+ display: grid;
+ grid-template-columns: repeat(auto-fill, minmax(16em, 1fr));
+ gap: 1em 2em;
+ margin-top: 1em;
+ border-top: 3px solid var(--primary-low); // matches tbody border
+ padding-top: 1em;
+ }
+ &-list-item {
+ display: grid;
+ grid-template-rows: subgrid;
+ grid-row: span 4;
+ gap: 0;
+ margin-bottom: 2em;
+ @include breakpoint("mobile-extra-large", min-width) {
+ margin-bottom: 3em;
+ }
+ }
+
+ p {
+ color: var(--primary-high);
+ margin: 0.25em 0 0.5em;
+ line-height: var(--line-height-large);
+ align-self: start;
+ @include breakpoint("mobile-extra-large", min-width) {
+ max-width: 17em;
+ }
+ }
+
+ button {
+ justify-self: start;
+ }
+
+ h4 {
+ font-size: var(--font-down-1);
+ font-weight: normal;
+ color: var(--primary-high);
+ margin: 0;
+ letter-spacing: 0.1px;
+ }
+}
diff --git a/config/locales/client.en.yml b/config/locales/client.en.yml
index faae0e05..bc19a9dc 100644
--- a/config/locales/client.en.yml
+++ b/config/locales/client.en.yml
@@ -226,9 +226,9 @@ en:
short_title: "LLMs"
no_llms: "No LLMs yet"
new: "New Model"
- display_name: "Name to display"
- name: "Model name"
- provider: "Service hosting the model"
+ display_name: "Name"
+ name: "Model id"
+ provider: "Provider"
tokenizer: "Tokenizer"
max_prompt_tokens: "Number of tokens for the prompt"
url: "URL of the service hosting the model"
@@ -247,9 +247,26 @@ en:
one: "This model is currently used by the %{settings} setting. If misconfigured, the feature won't work as expected."
other: "This model is currently used by the following settings: %{settings}. If misconfigured, features won't work as expected. "
+ model_description:
+ none: "General settings that work for most language models"
+ anthropic-claude-3-5-sonnet: "Anthropic's most intelligent model"
+ anthropic-claude-3-opus: "Excels at writing and complex tasks"
+ anthropic-claude-3-sonnet: "Balance of speed and intelligence"
+ anthropic-claude-3-haiku: "Fast and cost-effective"
+ google-gemini-1-5-pro: "Mid-sized multimodal model capable of a wide range of tasks"
+ google-gemini-1-5-flash: "Lightweight, fast, and cost-efficient with multimodal reasoning"
+ open_ai-gpt-4-turbo: "Previous generation high-intelligence model"
+ open_ai-gpt-4o: "High intelligence model for complex, multi-step tasks"
+ open_ai-gpt-4o-mini: "Affordable and fast small model for lightweight tasks"
+
+ configured:
+ title: "Configured LLMs"
preconfigured_llms: "Select your LLM"
preconfigured:
- none: "Configure manually..."
+ title_no_llms: "Select a template to get started"
+ title: "Unconfigured LLM templates"
+ fake: "Manual configuration"
+ button: "Set up"
next:
title: "Next"
@@ -276,6 +293,7 @@ en:
ollama: "Ollama"
CDCK: "CDCK"
samba_nova: "SambaNova"
+ fake: "Custom"
provider_fields:
access_key_id: "AWS Bedrock Access key ID"
diff --git a/spec/requests/admin/ai_llms_controller_spec.rb b/spec/requests/admin/ai_llms_controller_spec.rb
index 6abf4433..e136b1a8 100644
--- a/spec/requests/admin/ai_llms_controller_spec.rb
+++ b/spec/requests/admin/ai_llms_controller_spec.rb
@@ -35,14 +35,18 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
context "with valid attributes" do
it "creates a new LLM model" do
post "/admin/plugins/discourse-ai/ai-llms.json", params: { ai_llm: valid_attrs }
+ response_body = response.parsed_body
- created_model = LlmModel.last
+ created_model = response_body["ai_llm"]
- expect(created_model.display_name).to eq(valid_attrs[:display_name])
- expect(created_model.name).to eq(valid_attrs[:name])
- expect(created_model.provider).to eq(valid_attrs[:provider])
- expect(created_model.tokenizer).to eq(valid_attrs[:tokenizer])
- expect(created_model.max_prompt_tokens).to eq(valid_attrs[:max_prompt_tokens])
+ expect(created_model["display_name"]).to eq(valid_attrs[:display_name])
+ expect(created_model["name"]).to eq(valid_attrs[:name])
+ expect(created_model["provider"]).to eq(valid_attrs[:provider])
+ expect(created_model["tokenizer"]).to eq(valid_attrs[:tokenizer])
+ expect(created_model["max_prompt_tokens"]).to eq(valid_attrs[:max_prompt_tokens])
+
+ model = LlmModel.find(created_model["id"])
+ expect(model.display_name).to eq(valid_attrs[:display_name])
end
it "creates a companion user" do
diff --git a/spec/system/llms/ai_llm_spec.rb b/spec/system/llms/ai_llm_spec.rb
index 711231a3..65f4e02b 100644
--- a/spec/system/llms/ai_llm_spec.rb
+++ b/spec/system/llms/ai_llm_spec.rb
@@ -8,20 +8,10 @@ RSpec.describe "Managing LLM configurations", type: :system do
sign_in(admin)
end
- def select_preset(option)
- select_kit = PageObjects::Components::SelectKit.new(".ai-llm-editor__presets")
-
- select_kit.expand
- select_kit.select_row_by_value("anthropic-claude-3-haiku")
-
- find(".ai-llm-editor__next").click()
- end
-
it "correctly sets defaults" do
visit "/admin/plugins/discourse-ai/ai-llms"
- find(".ai-llms-list-editor__new").click()
- select_preset("anthropic-claude-3-haiku")
+ find("[data-llm-id='anthropic-claude-3-haiku'] button").click()
find("input.ai-llm-editor__api-key").fill_in(with: "abcd")
@@ -50,8 +40,7 @@ RSpec.describe "Managing LLM configurations", type: :system do
it "manually configures an LLM" do
visit "/admin/plugins/discourse-ai/ai-llms"
- find(".ai-llms-list-editor__new").click()
- select_preset("none")
+ find("[data-llm-id='none'] button").click()
find("input.ai-llm-editor__display-name").fill_in(with: "Self-hosted LLM")
find("input.ai-llm-editor__name").fill_in(with: "llava-hf/llava-v1.6-mistral-7b-hf")