diff --git a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
index bb04ddd7..028662c4 100644
--- a/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
+++ b/assets/javascripts/discourse/components/ai-llm-editor-form.gjs
@@ -55,7 +55,7 @@ export default class AiLlmEditorForm extends Component {
 
   @computed("args.model.provider")
   get canEditURL() {
-    return this.args.model.provider === "aws_bedrock";
+    return this.args.model.provider !== "aws_bedrock";
   }
 
   get modulesUsingModel() {
@@ -202,6 +202,7 @@ export default class AiLlmEditorForm extends Component {
         {{#if this.canEditURL}}
@@ -251,6 +252,7 @@ export default class AiLlmEditorForm extends Component {
diff --git a/spec/system/llms/ai_llm_spec.rb b/spec/system/llms/ai_llm_spec.rb
index 2440f71e..711231a3 100644
--- a/spec/system/llms/ai_llm_spec.rb
+++ b/spec/system/llms/ai_llm_spec.rb
@@ -1,23 +1,28 @@
 # frozen_string_literal: true
 
-RSpec.describe "Admin dashboard", type: :system do
+RSpec.describe "Managing LLM configurations", type: :system do
   fab!(:admin)
 
-  it "correctly sets defaults" do
+  before do
     SiteSetting.ai_bot_enabled = true
-
     sign_in(admin)
+  end
 
-    visit "/admin/plugins/discourse-ai/ai-llms"
-
-    find(".ai-llms-list-editor__new").click()
-
+  def select_preset(option)
     select_kit = PageObjects::Components::SelectKit.new(".ai-llm-editor__presets")
 
     select_kit.expand
     select_kit.select_row_by_value("anthropic-claude-3-haiku")
 
     find(".ai-llm-editor__next").click()
+  end
+
+  it "correctly sets defaults" do
+    visit "/admin/plugins/discourse-ai/ai-llms"
+
+    find(".ai-llms-list-editor__new").click()
+    select_preset("anthropic-claude-3-haiku")
+
     find("input.ai-llm-editor__api-key").fill_in(with: "abcd")
 
     PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle
@@ -41,4 +46,42 @@ RSpec.describe "Admin dashboard", type: :system do
     expect(llm.display_name).to eq(model_preset[:display_name])
     expect(llm.user_id).not_to be_nil
   end
+
+  it "manually configures an LLM" do
+    visit "/admin/plugins/discourse-ai/ai-llms"
+
+    find(".ai-llms-list-editor__new").click()
+    select_preset("none")
+
+    find("input.ai-llm-editor__display-name").fill_in(with: "Self-hosted LLM")
+    find("input.ai-llm-editor__name").fill_in(with: "llava-hf/llava-v1.6-mistral-7b-hf")
+    find("input.ai-llm-editor__url").fill_in(with: "srv://self-hostest.test")
+    find("input.ai-llm-editor__api-key").fill_in(with: "1234")
+    find("input.ai-llm-editor__max-prompt-tokens").fill_in(with: 8000)
+
+    find(".ai-llm-editor__provider").click
+    find(".select-kit-row[data-value=\"vllm\"]").click
+
+    find(".ai-llm-editor__tokenizer").click
+    find(".select-kit-row[data-name=\"Llama3Tokenizer\"]").click
+
+    find(".ai-llm-editor__vision-enabled input").click
+
+    PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle
+
+    find(".ai-llm-editor__save").click()
+
+    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms")
+
+    llm = LlmModel.order(:id).last
+
+    expect(llm.display_name).to eq("Self-hosted LLM")
+    expect(llm.name).to eq("llava-hf/llava-v1.6-mistral-7b-hf")
+    expect(llm.url).to eq("srv://self-hostest.test")
+    expect(llm.tokenizer).to eq("DiscourseAi::Tokenizer::Llama3Tokenizer")
+    expect(llm.max_prompt_tokens.to_i).to eq(8000)
+    expect(llm.provider).to eq("vllm")
+    expect(llm.vision_enabled).to eq(true)
+    expect(llm.user_id).not_to be_nil
+  end
 end