DEV: Transition "Select model" settings to only use LlmModels (#675)

We no longer support the "provider:model" format in the "ai_helper_model" and
"ai_embeddings_semantic_search_hyde_model" settings. We'll migrate existing
values and work with our new data-driven LLM configs from now on.
Roman Rizzi 2024-06-19 18:01:35 -03:00 committed by GitHub
parent ed3d5521a8
commit 8849caf136
21 changed files with 111 additions and 88 deletions
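
The change described above boils down to how the two "Select model" settings store their value. A minimal sketch of the before/after, assuming a hypothetical OpenAI model (the provider, model name, and id below are illustrative, not taken from any real site):

# Old format: the setting value encoded "provider:model" directly.
SiteSetting.ai_helper_model = "open_ai:gpt-4"

# New format: the setting references an LlmModel record by id, as "custom:<id>".
llm = LlmModel.find_by(provider: "open_ai", name: "gpt-4") # assumed to exist
SiteSetting.ai_helper_model = "custom:#{llm.id}"           # e.g. "custom:7"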

@@ -53,12 +53,7 @@ module DiscourseAi
       def destroy
         llm_model = LlmModel.find(params[:id])

-        dependant_settings = %i[ai_helper_model ai_embeddings_semantic_search_hyde_model]
-        in_use_by = []
-        dependant_settings.each do |s_name|
-          in_use_by << s_name if SiteSetting.public_send(s_name) == "custom:#{llm_model.id}"
-        end
+        in_use_by = DiscourseAi::Configuration::LlmValidator.new.modules_using(llm_model)

         if !in_use_by.empty?
           return(
@@ -85,11 +80,7 @@ module DiscourseAi
         llm_model = LlmModel.new(ai_llm_params)

-        DiscourseAi::Completions::Llm.proxy_from_obj(llm_model).generate(
-          "How much is 1 + 1?",
-          user: current_user,
-          feature_name: "llm_validator",
-        )
+        DiscourseAi::Configuration::LlmValidator.new.run_test(llm_model)

         render json: { success: true }
       rescue DiscourseAi::Completions::Endpoints::Base::CompletionFailed => e

@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class ChooseLlmModelSettingMigration < ActiveRecord::Migration[7.0]
+  def up
+    transition_to_llm_model("ai_helper_model")
+    transition_to_llm_model("ai_embeddings_semantic_search_hyde_model")
+  end
+
+  def transition_to_llm_model(llm_setting_name)
+    setting_value =
+      DB
+        .query_single(
+          "SELECT value FROM site_settings WHERE name = :llm_setting",
+          llm_setting: llm_setting_name,
+        )
+        .first
+        .to_s
+
+    return if setting_value.empty?
+
+    provider_and_model = setting_value.split(":")
+    provider = provider_and_model.first
+    model = provider_and_model.second
+    return if provider == "custom"
+
+    llm_model_id = DB.query_single(<<~SQL, provider: provider, model: model).first.to_s
+      SELECT id FROM llm_models WHERE provider = :provider AND name = :model
+    SQL
+
+    return if llm_model_id.empty?
+
+    DB.exec(<<~SQL, llm_setting: llm_setting_name, new_value: "custom:#{llm_model_id}")
+      UPDATE site_settings SET value=:new_value WHERE name=:llm_setting
+    SQL
+  end
+
+  def down
+    raise ActiveRecord::IrreversibleMigration
+  end
+end
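
To see what the migration above does to a concrete row, here is a walk-through with assumed data (both rows are hypothetical); the rewrite only happens when the old value is present, is not already in "custom:" form, and matches an llm_models row:

# Assumed existing rows:
#   site_settings: name = "ai_helper_model", value = "anthropic:claude-3-opus"
#   llm_models:    id = 12, provider = "anthropic", name = "claude-3-opus"
provider, model = "anthropic:claude-3-opus".split(":") # => ["anthropic", "claude-3-opus"]
# provider is not "custom" and a matching llm_models row exists, so the setting is rewritten:
#   UPDATE site_settings SET value = 'custom:12' WHERE name = 'ai_helper_model'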

@@ -20,7 +20,11 @@ module DiscourseAi
       class << self
         def provider_names
           providers = %w[aws_bedrock anthropic vllm hugging_face cohere open_ai google azure]
-          providers << "ollama" if Rails.env.development?
+          if !Rails.env.production?
+            providers << "fake"
+            providers << "ollama"
+          end
+
           providers
         end

@@ -10,19 +10,17 @@ module DiscourseAi
       def valid_value?(val)
         return true if val == "f"

-        SiteSetting.public_send(llm_dependency_setting_name).present?
+        @llm_dependency_setting_name =
+          DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name])
+
+        SiteSetting.public_send(@llm_dependency_setting_name).present?
       end

       def error_message
-        I18n.t("discourse_ai.llm.configuration.set_llm_first", setting: llm_dependency_setting_name)
-      end
-
-      def llm_dependency_setting_name
-        if @opts[:name] == :ai_embeddings_semantic_search_enabled
-          :ai_embeddings_semantic_search_hyde_model
-        else
-          :ai_helper_model
-        end
+        I18n.t(
+          "discourse_ai.llm.configuration.set_llm_first",
+          setting: @llm_dependency_setting_name,
+        )
       end
     end
   end

@@ -10,22 +10,14 @@ module DiscourseAi
       end

       def self.values
-        begin
-          llm_models =
-            DiscourseAi::Completions::Llm.models_by_provider.flat_map do |provider, models|
-              endpoint = DiscourseAi::Completions::Endpoints::Base.endpoint_for(provider.to_s)
-
-              models.map do |model_name|
-                { name: endpoint.display_name(model_name), value: "#{provider}:#{model_name}" }
-              end
-            end
-
-          LlmModel.all.each do |model|
-            llm_models << { name: model.display_name, value: "custom:#{model.id}" }
-          end
-
-          llm_models
-        end
+        values = DB.query_hash(<<~SQL)
+          SELECT display_name AS name, id AS value
+          FROM llm_models
+        SQL
+
+        values.each { |value_h| value_h["value"] = "custom:#{value_h["value"]}" }
+
+        values
       end

       def self.available_ai_bots

@@ -9,33 +9,33 @@ module DiscourseAi
       def valid_value?(val)
         if val == ""
+          parent_module_name = modules_and_choose_llm_settings.invert[@opts[:name]]
           @parent_enabled = SiteSetting.public_send(parent_module_name)
           return !@parent_enabled
         end

-        provider_and_model_name = val.split(":")
-        provider_name = provider_and_model_name.first
-        model_name_without_prov = provider_and_model_name[1..].join
-        is_custom_model = provider_name == "custom"
-
-        # Bypass setting validations for custom models. They don't rely on site settings.
-        if !is_custom_model
-          endpoint = DiscourseAi::Completions::Endpoints::Base.endpoint_for(provider_name)
-
-          return false if endpoint.nil?
-
-          if !endpoint.correctly_configured?(model_name_without_prov)
-            @endpoint = endpoint
-            return false
-          end
-
-          if !can_talk_to_model?(val)
-            @unreachable = true
-            return false
-          end
-        end
-
-        true
+        llm_model_id = val.split(":")&.last
+        llm_model = LlmModel.find_by(id: llm_model_id)
+        return false if llm_model.nil?
+
+        run_test(llm_model).tap { |result| @unreachable = result }
+      rescue StandardError
+        @unreachable = true
+        false
+      end
+
+      def run_test(llm_model)
+        DiscourseAi::Completions::Llm
+          .proxy_from_obj(llm_model)
+          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
+          .present?
+      end
+
+      def modules_using(llm_model)
+        choose_llm_settings = modules_and_choose_llm_settings.values
+
+        choose_llm_settings.select { |s| SiteSetting.public_send(s) == "custom:#{llm_model.id}" }
       end

       def error_message
@@ -48,28 +48,20 @@ module DiscourseAi
           )
         end

-        return(I18n.t("discourse_ai.llm.configuration.model_unreachable")) if @unreachable
+        return unless @unreachable

-        @endpoint&.configuration_hint
+        I18n.t("discourse_ai.llm.configuration.model_unreachable")
       end

-      def parent_module_name
-        if @opts[:name] == :ai_embeddings_semantic_search_hyde_model
-          :ai_embeddings_semantic_search_enabled
-        else
-          :composer_ai_helper_enabled
-        end
+      def choose_llm_setting_for(module_enabler_setting)
+        modules_and_choose_llm_settings[module_enabler_setting]
       end

-      private
-
-      def can_talk_to_model?(model_name)
-        DiscourseAi::Completions::Llm
-          .proxy(model_name)
-          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
-          .present?
-      rescue StandardError
-        false
+      def modules_and_choose_llm_settings
+        {
+          ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model,
+          composer_ai_helper_enabled: :ai_helper_model,
+        }
       end
     end
   end
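
A minimal sketch of how the two new LlmValidator helpers behave, assuming a hypothetical LlmModel with id 12 that ai_helper_model currently points at:

validator = DiscourseAi::Configuration::LlmValidator.new
llm_model = LlmModel.find(12) # hypothetical record

# modules_using returns the "choose LLM" settings whose value equals "custom:#{llm_model.id}";
# the admin controller's destroy action now uses this to refuse deleting an in-use model.
validator.modules_using(llm_model) # => [:ai_helper_model]

# run_test proxies a throwaway "How much is 1 + 1?" completion through the model's endpoint
# and returns true when a non-empty response comes back.
validator.run_test(llm_model) # => true when the endpoint is reachable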

@@ -3,7 +3,7 @@
 RSpec.describe Jobs::StreamPostHelper do
   subject(:job) { described_class.new }

-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }

   describe "#execute" do
     fab!(:topic)

@@ -108,7 +108,7 @@ RSpec.describe DiscourseAi::AiBot::Tools::Search do
       after { DiscourseAi::Embeddings::SemanticSearch.clear_cache_for(query) }

       it "supports semantic search when enabled" do
-        SiteSetting.ai_embeddings_semantic_search_hyde_model = "fake:fake"
+        assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model)
         SiteSetting.ai_embeddings_semantic_search_enabled = true
         SiteSetting.ai_embeddings_discourse_service_api_endpoint = "http://test.com"

@@ -5,7 +5,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
   fab!(:empty_locale_user) { Fabricate(:user, locale: "") }
   let(:prompt) { CompletionPrompt.find_by(id: mode) }

-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }

   let(:english_text) { <<~STRING }
     To perfect his horror, Caesar, surrounded at the base of the statue by the impatient daggers of his friends,

@@ -3,7 +3,7 @@
 RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do
   subject(:titler) { described_class.new(thread) }

-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }

   fab!(:thread) { Fabricate(:chat_thread) }
   fab!(:chat_message) { Fabricate(:chat_message, thread: thread) }

@@ -5,7 +5,7 @@ describe DiscourseAi::AiHelper::EntryPoint do
   fab!(:french_user) { Fabricate(:user, locale: "fr") }

   it "will correctly localize available prompts" do
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.default_locale = "en"
     SiteSetting.allow_user_locale = true
     SiteSetting.composer_ai_helper_enabled = true

@@ -6,7 +6,7 @@ RSpec.describe DiscourseAi::AiHelper::Painter do
   fab!(:user)

   before do
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.ai_stability_api_url = "https://api.stability.dev"
     SiteSetting.ai_stability_api_key = "abc"
     SiteSetting.ai_openai_api_key = "abc"

@@ -7,7 +7,7 @@ RSpec.describe DiscourseAi::Embeddings::SemanticSearch do
   let(:query) { "test_query" }
   let(:subject) { described_class.new(Guardian.new(user)) }

-  before { SiteSetting.ai_embeddings_semantic_search_hyde_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) }

   describe "#search_for_topics" do
     let(:hypothetical_post) { "This is an hypothetical post generated from the keyword test_query" }

@@ -6,6 +6,12 @@ module DiscourseAi::ChatBotHelper
     bots.each { |b| b.update!(enabled_chat_bot: true) }
     DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
   end
+
+  def assign_fake_provider_to(setting_name)
+    Fabricate(:llm_model, provider: "fake", name: "fake").tap do |fake_llm|
+      SiteSetting.public_send("#{setting_name}=", "custom:#{fake_llm.id}")
+    end
+  end
 end

 RSpec.configure { |c| c.include DiscourseAi::ChatBotHelper }

@@ -28,7 +28,7 @@ describe Plugin::Instance do
   fab!(:user)

   before do
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     SiteSetting.ai_helper_illustrate_post_model = "disabled"
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)

@@ -125,12 +125,12 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
     end

     it "validates the model is not in use" do
-      SiteSetting.ai_helper_model = "custom:#{llm_model.id}"
+      fake_llm = assign_fake_provider_to(:ai_helper_model)

-      delete "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}.json"
+      delete "/admin/plugins/discourse-ai/ai-llms/#{fake_llm.id}.json"

       expect(response.status).to eq(409)
-      expect(llm_model.reload).to eq(llm_model)
+      expect(fake_llm.reload).to eq(fake_llm)
     end
   end
 end

@@ -1,7 +1,7 @@
 # frozen_string_literal: true

 RSpec.describe DiscourseAi::AiHelper::AssistantController do
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }

   describe "#suggest" do
     let(:text_to_proofread) { "The rain in spain stays mainly in the plane." }

@@ -6,7 +6,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     sign_in(user)
   end

@@ -20,7 +20,7 @@ RSpec.describe "AI image caption", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.ai_llava_endpoint = "https://example.com"
     SiteSetting.ai_helper_enabled_features = "image_caption"
     sign_in(user)

@@ -28,7 +28,7 @@ RSpec.describe "AI Post helper", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     sign_in(user)
   end

@@ -38,7 +38,7 @@ RSpec.describe "AI Post helper", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     sign_in(user)
   end