FIX: dependency validator should depend on default LLM setting

This commit is contained in:
Keegan George 2025-07-16 07:08:29 -07:00
parent 7ae61ce877
commit c26d604072
No known key found for this signature in database
GPG Key ID: 91B40E38537AC000
2 changed files with 3 additions and 12 deletions

View File

@@ -10,17 +10,10 @@ module DiscourseAi
# Validates whether an AI feature's site setting may be enabled.
#
# The diff rendering collapsed the removed and added lines of this hunk into
# one span, duplicating the assignment and leaving a stray statement after the
# branch. Per the commit intent ("dependency validator should depend on default
# LLM setting") and the hunk counts (17 lines -> 10), the branching on specific
# setting names is removed: every feature now resolves its dependent LLM
# setting and requires it to be configured.
#
# @param val [String] incoming setting value; "f" (feature disabled) is always valid
# @return [Boolean] true when disabling, or when the dependent LLM setting is present
def valid_value?(val)
  # Turning the feature off never requires an LLM to be configured.
  return true if val == "f"

  # Resolve which default-LLM site setting this feature depends on; stored in
  # an ivar so error_message can reference it when validation fails.
  @llm_dependency_setting_name =
    DiscourseAi::Configuration::LlmValidator.new.choose_llm_setting_for(@opts[:name])

  SiteSetting.public_send(@llm_dependency_setting_name).present?
end
def error_message

View File

@@ -16,8 +16,6 @@ module DiscourseAi
WHERE vision_enabled
SQL
values.each { |value_h| value_h[:value] = "custom:#{value_h[:value]}" }
values
end
end