# frozen_string_literal: true

RSpec.describe "Managing LLM configurations", type: :system, js: true do
  fab!(:admin)

  before do
    SiteSetting.ai_bot_enabled = true
    sign_in(admin)
  end

  it "correctly sets defaults" do
    visit "/admin/plugins/discourse-ai/ai-llms"

    find("[data-llm-id='anthropic-claude-3-haiku'] button").click()
    find("input.ai-llm-editor__api-key").fill_in(with: "abcd")
    find(".ai-llm-editor__enabled-chat-bot input").click
    find(".ai-llm-editor__save").click()

    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms")

    llm = LlmModel.order(:id).last

    expect(llm.api_key).to eq("abcd")

    preset = DiscourseAi::Completions::Llm.presets.find { |p| p[:id] == "anthropic" }
    model_preset = preset[:models].find { |m| m[:name] == "claude-3-haiku" }

    expect(llm.name).to eq("claude-3-haiku")
    expect(llm.url).to eq(preset[:endpoint])
    expect(llm.tokenizer).to eq(preset[:tokenizer].to_s)
    expect(llm.max_prompt_tokens.to_i).to eq(model_preset[:tokens])
    expect(llm.provider).to eq("anthropic")
    expect(llm.display_name).to eq(model_preset[:display_name])
    expect(llm.user_id).not_to be_nil
  end

  it "manually configures an LLM" do
    visit "/admin/plugins/discourse-ai/ai-llms"

    find("[data-llm-id='none'] button").click()

    find("input.ai-llm-editor__display-name").fill_in(with: "Self-hosted LLM")
    find("input.ai-llm-editor__name").fill_in(with: "llava-hf/llava-v1.6-mistral-7b-hf")
    find("input.ai-llm-editor__url").fill_in(with: "srv://self-hostest.test")
    find("input.ai-llm-editor__api-key").fill_in(with: "1234")
    find("input.ai-llm-editor__max-prompt-tokens").fill_in(with: 8000)

    find(".ai-llm-editor__provider").click
    find(".select-kit-row[data-value=\"vllm\"]").click

    find(".ai-llm-editor__tokenizer").click
    find(".select-kit-row[data-name=\"Llama3Tokenizer\"]").click

    find(".ai-llm-editor__vision-enabled input").click
    find(".ai-llm-editor__enabled-chat-bot input").click

    find(".ai-llm-editor__save").click()

    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms")

    llm = LlmModel.order(:id).last

    expect(llm.display_name).to eq("Self-hosted LLM")
    expect(llm.name).to eq("llava-hf/llava-v1.6-mistral-7b-hf")
    expect(llm.url).to eq("srv://self-hostest.test")
    expect(llm.tokenizer).to eq("DiscourseAi::Tokenizer::Llama3Tokenizer")
    expect(llm.max_prompt_tokens.to_i).to eq(8000)
    expect(llm.provider).to eq("vllm")
    expect(llm.vision_enabled).to eq(true)
    expect(llm.user_id).not_to be_nil
  end

  context "when seeded LLM is present" do
    fab!(:llm_model) { Fabricate(:seeded_model) }

    it "shows the provider as CDCK in the UI" do
      visit "/admin/plugins/discourse-ai/ai-llms"

      expect(page).to have_css(
        "[data-llm-id='cdck-hosted'] .column-provider",
        text: I18n.t("js.discourse_ai.llms.providers.CDCK"),
      )
    end

    it "shows an info alert to the user about the seeded LLM" do
      visit "/admin/plugins/discourse-ai/ai-llms"

      find("[data-llm-id='#{llm_model.name}'] .column-edit .btn").click()

      expect(page).to have_css(
        ".alert.alert-info",
        text: I18n.t("js.discourse_ai.llms.seeded_warning"),
      )
    end

    it "limits and shows disabled inputs for the seeded LLM" do
      visit "/admin/plugins/discourse-ai/ai-llms"

      find("[data-llm-id='cdck-hosted'] .column-edit .btn").click()

      expect(page).to have_css(".ai-llm-editor__display-name[disabled]")
      expect(page).to have_css(".ai-llm-editor__name[disabled]")
      expect(page).to have_css(".ai-llm-editor__provider.is-disabled")
    end
  end
end