DEV: Transition "Select model" settings to only use LlmModels (#675)

We no longer support the "provider:model" format in the "ai_helper_model" and
"ai_embeddings_semantic_search_hyde_model" settings. We'll migrate existing
values and work with our new data-driven LLM configs from now on.
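
For context, a rough sketch of the value transition (the provider, model name, and id below are illustrative, assuming a matching LlmModel record exists):

    # Old format: the setting embedded a "provider:model" pair directly.
    SiteSetting.ai_helper_model # => "open_ai:gpt-4"

    # New format: the setting points at an LlmModel record by id.
    llm = LlmModel.find_by(provider: "open_ai", name: "gpt-4")
    SiteSetting.ai_helper_model = "custom:#{llm.id}" # e.g. "custom:7"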
Roman Rizzi 2024-06-19 18:01:35 -03:00 committed by GitHub
parent ed3d5521a8
commit 8849caf136
21 changed files with 111 additions and 88 deletions

View File

@@ -53,12 +53,7 @@ module DiscourseAi
       def destroy
         llm_model = LlmModel.find(params[:id])
-        dependant_settings = %i[ai_helper_model ai_embeddings_semantic_search_hyde_model]
-        in_use_by = []
-        dependant_settings.each do |s_name|
-          in_use_by << s_name if SiteSetting.public_send(s_name) == "custom:#{llm_model.id}"
-        end
+        in_use_by = DiscourseAi::Configuration::LlmValidator.new.modules_using(llm_model)
 
         if !in_use_by.empty?
           return(
@@ -85,11 +80,7 @@ module DiscourseAi
         llm_model = LlmModel.new(ai_llm_params)
-        DiscourseAi::Completions::Llm.proxy_from_obj(llm_model).generate(
-          "How much is 1 + 1?",
-          user: current_user,
-          feature_name: "llm_validator",
-        )
+        DiscourseAi::Configuration::LlmValidator.new.run_test(llm_model)
 
         render json: { success: true }
       rescue DiscourseAi::Completions::Endpoints::Base::CompletionFailed => e

View File

@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class ChooseLlmModelSettingMigration < ActiveRecord::Migration[7.0]
+  def up
+    transition_to_llm_model("ai_helper_model")
+    transition_to_llm_model("ai_embeddings_semantic_search_hyde_model")
+  end
+
+  def transition_to_llm_model(llm_setting_name)
+    setting_value =
+      DB
+        .query_single(
+          "SELECT value FROM site_settings WHERE name = :llm_setting",
+          llm_setting: llm_setting_name,
+        )
+        .first
+        .to_s
+
+    return if setting_value.empty?
+
+    provider_and_model = setting_value.split(":")
+    provider = provider_and_model.first
+    model = provider_and_model.second
+    return if provider == "custom"
+
+    llm_model_id = DB.query_single(<<~SQL, provider: provider, model: model).first.to_s
+      SELECT id FROM llm_models WHERE provider = :provider AND name = :model
+    SQL
+
+    return if llm_model_id.empty?
+
+    DB.exec(<<~SQL, llm_setting: llm_setting_name, new_value: "custom:#{llm_model_id}")
+      UPDATE site_settings SET value=:new_value WHERE name=:llm_setting
+    SQL
+  end
+
+  def down
+    raise ActiveRecord::IrreversibleMigration
+  end
+end

View File

@@ -20,7 +20,11 @@ module DiscourseAi
       class << self
         def provider_names
           providers = %w[aws_bedrock anthropic vllm hugging_face cohere open_ai google azure]
-          providers << "ollama" if Rails.env.development?
+          if !Rails.env.production?
+            providers << "fake"
+            providers << "ollama"
+          end
+
           providers
         end

View File

@@ -10,19 +10,17 @@ module DiscourseAi
       def valid_value?(val)
         return true if val == "f"
 
-        SiteSetting.public_send(llm_dependency_setting_name).present?
+        @llm_dependency_setting_name =
+          DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name])
+
+        SiteSetting.public_send(@llm_dependency_setting_name).present?
       end
 
       def error_message
-        I18n.t("discourse_ai.llm.configuration.set_llm_first", setting: llm_dependency_setting_name)
-      end
-
-      def llm_dependency_setting_name
-        if @opts[:name] == :ai_embeddings_semantic_search_enabled
-          :ai_embeddings_semantic_search_hyde_model
-        else
-          :ai_helper_model
-        end
+        I18n.t(
+          "discourse_ai.llm.configuration.set_llm_first",
+          setting: @llm_dependency_setting_name,
+        )
       end
     end
   end

View File

@@ -10,22 +10,14 @@ module DiscourseAi
       end
 
      def self.values
        begin
-          llm_models =
-            DiscourseAi::Completions::Llm.models_by_provider.flat_map do |provider, models|
-              endpoint = DiscourseAi::Completions::Endpoints::Base.endpoint_for(provider.to_s)
-
-              models.map do |model_name|
-                { name: endpoint.display_name(model_name), value: "#{provider}:#{model_name}" }
-              end
-            end
-
-          LlmModel.all.each do |model|
-            llm_models << { name: model.display_name, value: "custom:#{model.id}" }
-          end
-
-          llm_models
+          values = DB.query_hash(<<~SQL)
+            SELECT display_name AS name, id AS value
+            FROM llm_models
+          SQL
+
+          values.each { |value_h| value_h["value"] = "custom:#{value_h["value"]}" }
        end
+
+        values
      end
 
      def self.available_ai_bots

View File

@@ -9,33 +9,33 @@ module DiscourseAi
      def valid_value?(val)
        if val == ""
+          parent_module_name = modules_and_choose_llm_settings.invert[@opts[:name]]
+
          @parent_enabled = SiteSetting.public_send(parent_module_name)
          return !@parent_enabled
        end
 
-        provider_and_model_name = val.split(":")
-        provider_name = provider_and_model_name.first
-        model_name_without_prov = provider_and_model_name[1..].join
-        is_custom_model = provider_name == "custom"
+        llm_model_id = val.split(":")&.last
+        llm_model = LlmModel.find_by(id: llm_model_id)
+        return false if llm_model.nil?
 
-        # Bypass setting validations for custom models. They don't rely on site settings.
-        if !is_custom_model
-          endpoint = DiscourseAi::Completions::Endpoints::Base.endpoint_for(provider_name)
-
-          return false if endpoint.nil?
-
-          if !endpoint.correctly_configured?(model_name_without_prov)
-            @endpoint = endpoint
-            return false
-          end
-        end
-
-        if !can_talk_to_model?(val)
-          @unreachable = true
-          return false
-        end
-
-        true
+        run_test(llm_model).tap { |result| @unreachable = result }
+      rescue StandardError
+        @unreachable = true
+        false
+      end
+
+      def run_test(llm_model)
+        DiscourseAi::Completions::Llm
+          .proxy_from_obj(llm_model)
+          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
+          .present?
+      end
+
+      def modules_using(llm_model)
+        choose_llm_settings = modules_and_choose_llm_settings.values
+
+        choose_llm_settings.select { |s| SiteSetting.public_send(s) == "custom:#{llm_model.id}" }
      end
 
      def error_message
@@ -48,28 +48,20 @@ module DiscourseAi
          )
        end
 
-        return(I18n.t("discourse_ai.llm.configuration.model_unreachable")) if @unreachable
+        return unless @unreachable
 
-        @endpoint&.configuration_hint
+        I18n.t("discourse_ai.llm.configuration.model_unreachable")
      end
 
-      def parent_module_name
-        if @opts[:name] == :ai_embeddings_semantic_search_hyde_model
-          :ai_embeddings_semantic_search_enabled
-        else
-          :composer_ai_helper_enabled
-        end
+      def choose_llm_setting_for(module_enabler_setting)
+        modules_and_choose_llm_settings[module_enabler_setting]
      end
 
      private
 
-      def can_talk_to_model?(model_name)
-        DiscourseAi::Completions::Llm
-          .proxy(model_name)
-          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
-          .present?
-      rescue StandardError
-        false
-      end
+      def modules_and_choose_llm_settings
+        {
+          ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model,
+          composer_ai_helper_enabled: :ai_helper_model,
+        }
+      end
    end
  end

View File

@@ -3,7 +3,7 @@
 RSpec.describe Jobs::StreamPostHelper do
   subject(:job) { described_class.new }
 
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }
 
   describe "#execute" do
     fab!(:topic)

View File

@@ -108,7 +108,7 @@ RSpec.describe DiscourseAi::AiBot::Tools::Search do
     after { DiscourseAi::Embeddings::SemanticSearch.clear_cache_for(query) }
 
     it "supports semantic search when enabled" do
-      SiteSetting.ai_embeddings_semantic_search_hyde_model = "fake:fake"
+      assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model)
       SiteSetting.ai_embeddings_semantic_search_enabled = true
       SiteSetting.ai_embeddings_discourse_service_api_endpoint = "http://test.com"

View File

@@ -5,7 +5,7 @@ RSpec.describe DiscourseAi::AiHelper::Assistant do
   fab!(:empty_locale_user) { Fabricate(:user, locale: "") }
   let(:prompt) { CompletionPrompt.find_by(id: mode) }
 
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }
 
   let(:english_text) { <<~STRING }
     To perfect his horror, Caesar, surrounded at the base of the statue by the impatient daggers of his friends,

View File

@@ -3,7 +3,7 @@
 RSpec.describe DiscourseAi::AiHelper::ChatThreadTitler do
   subject(:titler) { described_class.new(thread) }
 
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }
 
   fab!(:thread) { Fabricate(:chat_thread) }
   fab!(:chat_message) { Fabricate(:chat_message, thread: thread) }

View File

@@ -5,7 +5,7 @@ describe DiscourseAi::AiHelper::EntryPoint do
   fab!(:french_user) { Fabricate(:user, locale: "fr") }
 
   it "will correctly localize available prompts" do
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.default_locale = "en"
     SiteSetting.allow_user_locale = true
     SiteSetting.composer_ai_helper_enabled = true

View File

@@ -6,7 +6,7 @@ RSpec.describe DiscourseAi::AiHelper::Painter do
   fab!(:user)
 
   before do
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.ai_stability_api_url = "https://api.stability.dev"
     SiteSetting.ai_stability_api_key = "abc"
     SiteSetting.ai_openai_api_key = "abc"

View File

@@ -7,7 +7,7 @@ RSpec.describe DiscourseAi::Embeddings::SemanticSearch do
   let(:query) { "test_query" }
   let(:subject) { described_class.new(Guardian.new(user)) }
 
-  before { SiteSetting.ai_embeddings_semantic_search_hyde_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_embeddings_semantic_search_hyde_model) }
 
   describe "#search_for_topics" do
     let(:hypothetical_post) { "This is an hypothetical post generated from the keyword test_query" }

View File

@@ -6,6 +6,12 @@ module DiscourseAi::ChatBotHelper
     bots.each { |b| b.update!(enabled_chat_bot: true) }
     DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
   end
+
+  def assign_fake_provider_to(setting_name)
+    Fabricate(:llm_model, provider: "fake", name: "fake").tap do |fake_llm|
+      SiteSetting.public_send("#{setting_name}=", "custom:#{fake_llm.id}")
+    end
+  end
 end
 
 RSpec.configure { |c| c.include DiscourseAi::ChatBotHelper }

View File

@@ -28,7 +28,7 @@ describe Plugin::Instance do
   fab!(:user)
 
   before do
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     SiteSetting.ai_helper_illustrate_post_model = "disabled"
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)

View File

@@ -125,12 +125,12 @@ RSpec.describe DiscourseAi::Admin::AiLlmsController do
     end
 
     it "validates the model is not in use" do
-      SiteSetting.ai_helper_model = "custom:#{llm_model.id}"
+      fake_llm = assign_fake_provider_to(:ai_helper_model)
 
-      delete "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}.json"
+      delete "/admin/plugins/discourse-ai/ai-llms/#{fake_llm.id}.json"
 
       expect(response.status).to eq(409)
-      expect(llm_model.reload).to eq(llm_model)
+      expect(fake_llm.reload).to eq(fake_llm)
     end
   end
 end

View File

@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 RSpec.describe DiscourseAi::AiHelper::AssistantController do
-  before { SiteSetting.ai_helper_model = "fake:fake" }
+  before { assign_fake_provider_to(:ai_helper_model) }
 
   describe "#suggest" do
     let(:text_to_proofread) { "The rain in spain stays mainly in the plane." }

View File

@@ -6,7 +6,7 @@ RSpec.describe "AI Composer helper", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     sign_in(user)
   end

View File

@@ -20,7 +20,7 @@ RSpec.describe "AI image caption", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.ai_llava_endpoint = "https://example.com"
     SiteSetting.ai_helper_enabled_features = "image_caption"
     sign_in(user)

View File

@@ -28,7 +28,7 @@ RSpec.describe "AI Post helper", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     sign_in(user)
   end

View File

@@ -38,7 +38,7 @@ RSpec.describe "AI Post helper", type: :system, js: true do
   before do
     Group.find_by(id: Group::AUTO_GROUPS[:admins]).add(user)
-    SiteSetting.ai_helper_model = "fake:fake"
+    assign_fake_provider_to(:ai_helper_model)
     SiteSetting.composer_ai_helper_enabled = true
     sign_in(user)
   end