Mirror of https://github.com/discourse/discourse-ai.git, synced 2025-05-16 21:35:05 +00:00.
Previously, the behaviour for model settings was that when you tried to set a model, a test ran and returned an error if the test could not complete successfully. That error then prevented the site setting from being saved, which caused issues when we tried to automate things. This PR updates the flow so that the test still runs and quietly logs failures, but no longer prevents the setting from being saved. Instead we rely on "run test" in the LLM config, along with ProblemChecks, to catch issues.
85 lines
2.2 KiB
Ruby
# frozen_string_literal: true
|
|
|
|
module DiscourseAi
  module Configuration
    # Site-setting validator for the "choose an LLM" settings (e.g.
    # ai_helper_model). Smoke-tests a selected model before the setting is
    # saved and explains why a candidate value was rejected.
    class LlmValidator
      # @param opts [Hash] validator options from the site-setting definition;
      #   expects :name (the Symbol name of the setting being validated)
      def initialize(opts = {})
        @opts = opts
      end

      # Called by the site-setting machinery with the candidate value.
      #
      # Clearing the setting ("") is only allowed while the module that
      # depends on it is disabled. Any other value is checked against the
      # seeded-model allow list and then smoke-tested with a tiny completion.
      # Outside the test environment, errors raised during the check are
      # swallowed and the value is accepted anyway — misconfiguration is
      # surfaced later via "run test" / ProblemChecks instead of blocking
      # the save.
      def valid_value?(val)
        if val == ""
          # Find which module this setting belongs to and only allow the
          # reset while that module is switched off.
          @parent_module_name = modules_and_choose_llm_settings.invert[@opts[:name]]
          @parent_enabled = SiteSetting.public_send(@parent_module_name)
          return !@parent_enabled
        end

        # Raises Discourse::InvalidParameters for a disallowed seeded model;
        # its return value is intentionally ignored otherwise.
        allowed_seeded_model?(val)

        # NOTE(review): @unreachable stores the raw test result — truthy when
        # the model *did* respond — so the name reads inverted; confirm
        # against error_message before renaming anything here. The assignment
        # is also this method's return value.
        @unreachable = run_test(val)
      rescue StandardError => e
        raise e if Rails.env.test?
        true
      end

      # Smoke-tests the model by asking for a trivial completion.
      # Returns true when the endpoint produced a non-blank response.
      def run_test(val)
        llm = DiscourseAi::Completions::Llm.proxy(val)
        response = llm.generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
        response.present?
      end

      # Lists the "choose LLM" setting names currently pointing at the given
      # model record (stored as "custom:<id>").
      def modules_using(llm_model)
        modules_and_choose_llm_settings.values.select do |setting_name|
          SiteSetting.public_send(setting_name) == "custom:#{llm_model.id}"
        end
      end

      # Human-readable reason for the most recent rejection, derived from the
      # flags captured by valid_value? / allowed_seeded_model?. Returns nil
      # when no rejection condition was recorded.
      def error_message
        if @parent_enabled
          return(
            I18n.t(
              "discourse_ai.llm.configuration.disable_module_first",
              setting: @parent_module_name,
            )
          )
        end

        return I18n.t("discourse_ai.llm.configuration.invalid_seeded_model") if @invalid_seeded_model

        I18n.t("discourse_ai.llm.configuration.model_unreachable") if @unreachable
      end

      # Maps a module's enable setting to its companion "choose LLM" setting.
      def choose_llm_setting_for(module_enabler_setting)
        modules_and_choose_llm_settings[module_enabler_setting]
      end

      # module-enable setting => the LLM-selection setting it depends on
      def modules_and_choose_llm_settings
        {
          ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model,
          ai_helper_enabled: :ai_helper_model,
          ai_summarization_enabled: :ai_summarization_model,
        }
      end

      # Values whose id portion parses to a positive integer are always
      # allowed; anything else is treated as a seeded model and must appear
      # in the per-setting "<setting>_allowed_seeded_models" list, otherwise
      # the rejection flag is set and Discourse::InvalidParameters is raised.
      def allowed_seeded_model?(val)
        seeded_id = val.split(":").last
        return true if seeded_id.to_i > 0

        allowed = SiteSetting.public_send("#{@opts[:name]}_allowed_seeded_models")

        unless allowed.split("|").include?(seeded_id)
          @invalid_seeded_model = true
          raise Discourse::InvalidParameters.new
        end
      end
    end
  end
end
|