FIX: urlEditable must be true for all providers except Bedrock (#766)
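
The canEditURL getter in the LLM editor form had its comparison inverted: it returned true only for AWS Bedrock, hiding the endpoint URL field for every other provider. This commit flips the check to !== (presumably because Bedrock is the one provider whose endpoint is derived from AWS settings rather than a user-supplied URL), adds CSS class hooks to the provider and tokenizer dropdowns, and extends the system specs with a manual LLM configuration flow.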

Author: Roman Rizzi, 2024-08-22 11:31:28 -03:00 (committed by GitHub)
parent 3c7bd9bbd3 · commit 9019e90b87
2 changed files with 53 additions and 8 deletions

File 1 of 2: AiLlmEditorForm component

@@ -55,7 +55,7 @@ export default class AiLlmEditorForm extends Component {
   @computed("args.model.provider")
   get canEditURL() {
-    return this.args.model.provider === "aws_bedrock";
+    return this.args.model.provider !== "aws_bedrock";
   }

   get modulesUsingModel() {

@@ -202,6 +202,7 @@ export default class AiLlmEditorForm extends Component {
         <ComboBox
           @value={{@model.provider}}
           @content={{this.selectedProviders}}
+          @class="ai-llm-editor__provider"
         />
       </div>
       {{#if this.canEditURL}}

@@ -251,6 +252,7 @@ export default class AiLlmEditorForm extends Component {
         <ComboBox
           @value={{@model.tokenizer}}
           @content={{@llms.resultSetMeta.tokenizers}}
+          @class="ai-llm-editor__tokenizer"
         />
       </div>
       <div class="control-group">
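
The two new @class arguments give the select-kit dropdowns stable CSS hooks so the system spec (second file below) can drive them. Inside the spec, the interaction looks roughly like this (Capybara finders and selectors taken from the spec itself):

    find(".ai-llm-editor__provider").click              # expand the provider select-kit
    find(".select-kit-row[data-value=\"vllm\"]").click  # pick the vLLM provider row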

File 2 of 2: LLM configuration system spec

@@ -1,23 +1,28 @@
 # frozen_string_literal: true

-RSpec.describe "Admin dashboard", type: :system do
+RSpec.describe "Managing LLM configurations", type: :system do
   fab!(:admin)

-  it "correctly sets defaults" do
+  before do
     SiteSetting.ai_bot_enabled = true
     sign_in(admin)
+  end

-    visit "/admin/plugins/discourse-ai/ai-llms"
-
-    find(".ai-llms-list-editor__new").click()
-
+  def select_preset(option)
     select_kit = PageObjects::Components::SelectKit.new(".ai-llm-editor__presets")
     select_kit.expand
     select_kit.select_row_by_value("anthropic-claude-3-haiku")
     find(".ai-llm-editor__next").click()
+  end
+
+  it "correctly sets defaults" do
+    visit "/admin/plugins/discourse-ai/ai-llms"
+    find(".ai-llms-list-editor__new").click()
+    select_preset("anthropic-claude-3-haiku")

     find("input.ai-llm-editor__api-key").fill_in(with: "abcd")

     PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle

@@ -41,4 +46,42 @@ RSpec.describe "Admin dashboard", type: :system do
     expect(llm.display_name).to eq(model_preset[:display_name])
     expect(llm.user_id).not_to be_nil
   end
+
+  it "manually configures an LLM" do
+    visit "/admin/plugins/discourse-ai/ai-llms"
+    find(".ai-llms-list-editor__new").click()
+
+    select_preset("none")
+
+    find("input.ai-llm-editor__display-name").fill_in(with: "Self-hosted LLM")
+    find("input.ai-llm-editor__name").fill_in(with: "llava-hf/llava-v1.6-mistral-7b-hf")
+    find("input.ai-llm-editor__url").fill_in(with: "srv://self-hostest.test")
+    find("input.ai-llm-editor__api-key").fill_in(with: "1234")
+    find("input.ai-llm-editor__max-prompt-tokens").fill_in(with: 8000)
+
+    find(".ai-llm-editor__provider").click
+    find(".select-kit-row[data-value=\"vllm\"]").click
+
+    find(".ai-llm-editor__tokenizer").click
+    find(".select-kit-row[data-name=\"Llama3Tokenizer\"]").click
+
+    find(".ai-llm-editor__vision-enabled input").click
+
+    PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle
+
+    find(".ai-llm-editor__save").click()
+
+    expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms")
+
+    llm = LlmModel.order(:id).last
+
+    expect(llm.display_name).to eq("Self-hosted LLM")
+    expect(llm.name).to eq("llava-hf/llava-v1.6-mistral-7b-hf")
+    expect(llm.url).to eq("srv://self-hostest.test")
+    expect(llm.tokenizer).to eq("DiscourseAi::Tokenizer::Llama3Tokenizer")
+    expect(llm.max_prompt_tokens.to_i).to eq(8000)
+    expect(llm.provider).to eq("vllm")
+    expect(llm.vision_enabled).to eq(true)
+    expect(llm.user_id).not_to be_nil
+  end
 end
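
One thing worth flagging in the spec as recorded: select_preset accepts an option argument but always selects the "anthropic-claude-3-haiku" row, so select_preset("none") does not actually choose the "none" preset (the second test still passes because it overwrites every prefilled field). A minimal sketch of the parameterized helper this presumably intended, as a hypothetical follow-up rather than part of this commit:

    # Hypothetical fix, not part of this commit: honor the caller's choice
    # instead of hardcoding the Haiku preset.
    def select_preset(option)
      select_kit = PageObjects::Components::SelectKit.new(".ai-llm-editor__presets")
      select_kit.expand
      select_kit.select_row_by_value(option)
      find(".ai-llm-editor__next").click()
    end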