FIX: Custom prefix causing allowed seeded LLMs not to be shown (#1039)

* FIX: Custom prefix causing allowed seeded LLMs not to be shown

* DEV: update spec

* not `_map`, so the value should be a string, not an array
Keegan George 2024-12-23 14:42:26 +09:00 committed by GitHub
parent d15876025f
commit bdb8f1d5e0
2 changed files with 3 additions and 3 deletions

@@ -59,7 +59,7 @@ module DiscourseAi
      if allowed_seeded_llms.is_a?(Array)
        values =
          values.filter do |value_h|
-           value_h[:value] > 0 || allowed_seeded_llms.include?("custom:#{value_h[:value]}")
+           value_h[:value] > 0 || allowed_seeded_llms.include?("#{value_h[:value]}")
          end
      end
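
Why the prefix broke things: the spec below configures the allowed-seeded-models site setting with bare model IDs (`seeded_llm.id.to_s`, e.g. "-1"), so a lookup keyed on "custom:-1" could never match and seeded LLMs were silently filtered out. A minimal sketch with illustrative values (not the plugin's actual data):

    # allowed list as stored by the site setting: bare IDs as strings
    allowed_seeded_llms = ["-1"]
    # one seeded model (negative ID) and one custom model (positive ID)
    values = [{ value: -1 }, { value: 5 }]

    # old check: "custom:-1" is never in the allowed list, so the seeded model is dropped
    values.filter { |v| v[:value] > 0 || allowed_seeded_llms.include?("custom:#{v[:value]}") }
    # => [{ value: 5 }]

    # new check: the bare ID matches and the seeded model is kept
    values.filter { |v| v[:value] > 0 || allowed_seeded_llms.include?("#{v[:value]}") }
    # => [{ value: -1 }, { value: 5 }]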

@@ -193,7 +193,7 @@ RSpec.describe DiscourseAi::Admin::AiSpamController do
      it "correctly filters seeded llms" do
        SiteSetting.ai_spam_detection_enabled = true
-       seeded_llm = Fabricate(:llm_model, id: -1, name: "seeded")
+       seeded_llm = Fabricate(:seeded_model)
        get "/admin/plugins/discourse-ai/ai-spam.json"
        expect(response.status).to eq(200)
@@ -202,7 +202,7 @@ RSpec.describe DiscourseAi::Admin::AiSpamController do
        # only includes fabricated model
        expect(json["available_llms"].length).to eq(1)
-       SiteSetting.ai_spam_detection_model_allowed_seeded_models = seeded_llm.identifier
+       SiteSetting.ai_spam_detection_model_allowed_seeded_models = seeded_llm.id.to_s
        get "/admin/plugins/discourse-ai/ai-spam.json"
        expect(response.status).to eq(200)
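
On the last commit note ("not `_map`, so the value should be a string, not an array"): `ai_spam_detection_model_allowed_seeded_models` is a plain string setting rather than a `_map` setting, which is why the spec assigns `seeded_llm.id.to_s` instead of an array. A hedged sketch of how such a value typically becomes the `allowed_seeded_llms` array checked in the first hunk; the pipe delimiter and the parsing step are assumptions, as this diff does not show that code:

    # Assumption: Discourse list settings are pipe-delimited strings;
    # the actual parsing code is not part of this diff.
    raw = SiteSetting.ai_spam_detection_model_allowed_seeded_models  # e.g. "-1|-2"
    allowed_seeded_llms = raw.split("|")                             # => ["-1", "-2"]
    allowed_seeded_llms.is_a?(Array)                                 # => true, so the filter runs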