DEV: remove custom: prefix

and update migrations for translations
This commit is contained in:
Keegan George 2025-07-15 18:07:05 -07:00
parent b58f198ee8
commit f593ab6e1e
No known key found for this signature in database
GPG Key ID: 91B40E38537AC000
14 changed files with 75 additions and 106 deletions

View File

@ -99,7 +99,7 @@ class LlmModel < ActiveRecord::Base
end
def identifier
"custom:#{id}"
"#{id}"
end
def toggle_companion_user

View File

@ -90,7 +90,7 @@ export default class AiSpam extends Component {
this.isEnabled = model.is_enabled;
if (model.llm_id) {
this.selectedLLM = "custom:" + model.llm_id;
this.selectedLLM = model.llm_id;
} else {
if (this.availableLLMs.length) {
this.selectedLLM = this.availableLLMs[0].id;

View File

@ -16,7 +16,7 @@ discourse_ai:
type: enum
allow_any: false
enum: "DiscourseAi::Configuration::LlmEnumerator"
validator: "DiscourseAi::Configuration::SimpleLlmValidator"
validator: "DiscourseAi::Configuration::LlmValidator"
ai_sentiment_enabled:
default: false

View File

@ -6,7 +6,7 @@ class SeedAiDefaultLlmModel < ActiveRecord::Migration[7.2]
last_model_id = DB.query_single("SELECT id FROM llm_models ORDER BY id DESC LIMIT 1").first
if last_model_id.present?
execute "UPDATE site_settings SET value = 'custom:#{last_model_id}' WHERE name = 'ai_default_llm_model' AND (value IS NULL OR value = '');"
execute "UPDATE site_settings SET value = '#{last_model_id}' WHERE name = 'ai_default_llm_model' AND (value IS NULL OR value = '');"
end
end

View File

@ -12,7 +12,7 @@ class CopyAiHelperModelToPersonaDefault < ActiveRecord::Migration[7.2]
execute(<<~SQL)
UPDATE ai_personas
SET default_llm_id = #{model_id}
WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25, -26) AND default_llm_id IS NULL
WHERE id IN (-18, -19, -20, -21, -22, -23, -24, -25) AND default_llm_id IS NULL
SQL
end
end

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class CopyTranslationModelToPersona < ActiveRecord::Migration[7.2]
  # Copies the legacy `ai_translation_model` site setting (stored with a
  # "custom:<id>" prefix) onto the translation personas' `default_llm_id`,
  # so each persona carries its own LLM reference going forward.
  def up
    ai_translation_model =
      DB.query_single("SELECT value FROM site_settings WHERE name = 'ai_translation_model'").first

    if ai_translation_model.present? && ai_translation_model.start_with?("custom:")
      # Extract the numeric model ID from the setting value
      # (e.g., "custom:-5" -> -5). Integer() with exception: false guards the
      # raw SQL interpolation below against malformed values such as "custom:".
      model_id = Integer(ai_translation_model.split(":").last, exception: false)

      if model_id
        # Only fill in the translation personas (IDs -27..-30) that have no
        # explicit default LLM yet, so prior admin choices are never overwritten.
        execute(<<~SQL)
          UPDATE ai_personas
          SET default_llm_id = #{model_id}
          WHERE id IN (-27, -28, -29, -30) AND default_llm_id IS NULL
        SQL
      end
    end
  end

  # One-way data copy; the original setting value is not restorable.
  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

View File

@ -0,0 +1,26 @@
# frozen_string_literal: true
class CopyAiImageCaptionModelToPersonaDefault < ActiveRecord::Migration[7.2]
  # Copies the legacy `ai_helper_image_caption_model` site setting (stored with
  # a "custom:<id>" prefix) onto the image-caption persona's `default_llm_id`,
  # so the persona carries its own LLM reference going forward.
  def up
    ai_helper_image_caption_model =
      DB.query_single(
        "SELECT value FROM site_settings WHERE name = 'ai_helper_image_caption_model'",
      ).first

    if ai_helper_image_caption_model.present? &&
         ai_helper_image_caption_model.start_with?("custom:")
      # Extract the numeric model ID from the setting value
      # (e.g., "custom:1" -> 1). Integer() with exception: false guards the
      # raw SQL interpolation below against malformed values such as "custom:".
      model_id = Integer(ai_helper_image_caption_model.split(":").last, exception: false)

      if model_id
        # Only fill in the image-caption persona (ID -26) when it has no
        # explicit default LLM yet, so a prior admin choice is never overwritten.
        execute(<<~SQL)
          UPDATE ai_personas
          SET default_llm_id = #{model_id}
          WHERE id IN (-26) AND default_llm_id IS NULL
        SQL
      end
    end
  end

  # One-way data copy; the original setting value is not restorable.
  def down
    raise ActiveRecord::IrreversibleMigration
  end
end

View File

@ -314,19 +314,11 @@ module DiscourseAi
# 1. Persona's default LLM
# 2. SiteSetting.ai_default_llm_model (or newest LLM if not set)
def self.find_ai_helper_model(helper_mode, persona_klass)
model_id =
persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider.
model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model
if model_id.present?
LlmModel.find_by(id: model_id)
else
last_model_id = LlmModel.last&.id
# SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model.
if last_model_id.present? && SiteSetting.ai_default_llm_model.empty?
SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider.
end
LlmModel.last
end
end

View File

@ -5,6 +5,7 @@ require "enum_site_setting"
module DiscourseAi
module Configuration
class LlmEnumerator < ::EnumSiteSetting
# TODO: global_usage is no longer accurate, it should be removed/updated
def self.global_usage
rval = Hash.new { |h, k| h[k] = [] }
@ -107,7 +108,6 @@ module DiscourseAi
end
end
values.each { |value_h| value_h[:value] = "custom:#{value_h[:value]}" }
values
end
end

View File

@ -2,9 +2,6 @@
module DiscourseAi
module Configuration
class InvalidSeededModelError < StandardError
end
class LlmValidator
def initialize(opts = {})
@opts = opts
@ -18,12 +15,7 @@ module DiscourseAi
return !@parent_enabled
end
allowed_seeded_model?(val)
run_test(val).tap { |result| @unreachable = result }
rescue DiscourseAi::Configuration::InvalidSeededModelError => e
@unreachable = true
false
rescue StandardError => e
raise e if Rails.env.test?
@unreachable = true
@ -31,6 +23,11 @@ module DiscourseAi
end
def run_test(val)
if Rails.env.test?
# In test mode, we assume the model is reachable.
return true
end
DiscourseAi::Completions::Llm
.proxy(val)
.generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
@ -53,10 +50,6 @@ module DiscourseAi
)
end
if @invalid_seeded_model
return I18n.t("discourse_ai.llm.configuration.invalid_seeded_model")
end
return unless @unreachable
I18n.t("discourse_ai.llm.configuration.model_unreachable")
@ -68,25 +61,12 @@ module DiscourseAi
def modules_and_choose_llm_settings
{
ai_embeddings_semantic_search_enabled: :ai_embeddings_semantic_search_hyde_model,
ai_helper_enabled: :ai_helper_model,
ai_summarization_enabled: :ai_summarization_model,
ai_translation_enabled: :ai_translation_model,
ai_embeddings_semantic_search_enabled: :ai_default_llm_model,
ai_helper_enabled: :ai_default_llm_model,
ai_summarization_enabled: :ai_default_llm_model,
ai_translation_enabled: :ai_default_llm_model,
}
end
def allowed_seeded_model?(val)
id = val.split(":").last
return true if id.to_i > 0
setting = @opts[:name]
allowed_list = SiteSetting.public_send("#{setting}_allowed_seeded_models")
if allowed_list.split("|").exclude?(id)
@invalid_seeded_model = true
raise DiscourseAi::Configuration::InvalidSeededModelError.new
end
end
end
end
end

View File

@ -1,39 +0,0 @@
# frozen_string_literal: true
module DiscourseAi
  module Configuration
    # Lightweight validator for LLM-choice site settings: issues a single
    # trivial completion against the selected model to check reachability.
    # Unlike LlmValidator it performs no seeded-model allow-list checks, and
    # connectivity problems only surface through #error_message.
    class SimpleLlmValidator
      def initialize(opts = {})
        @opts = opts
      end

      # Returns truthy to accept the value. A blank setting is always valid.
      # NOTE(review): @unreachable is assigned the raw run_test result, which
      # is truthy when the model DID respond — the name reads inverted
      # relative to #error_message's `return unless @unreachable` guard;
      # confirm the intended semantics before relying on the warning.
      # On unexpected errors (outside test env) the value is still accepted
      # (returns true) — validation here is deliberately best-effort.
      def valid_value?(val)
        return true if val == ""
        run_test(val).tap { |result| @unreachable = result }
      rescue StandardError => e
        raise e if Rails.env.test?
        @unreachable = true
        true
      end

      # Sends "How much is 1 + 1?" through the proxied LLM and reports whether
      # any response came back.
      def run_test(val)
        if Rails.env.test?
          # In test mode, we assume the model is reachable.
          return true
        end
        DiscourseAi::Completions::Llm
          .proxy(val)
          .generate("How much is 1 + 1?", user: nil, feature_name: "llm_validator")
          .present?
      end

      # Localized warning when the reachability flag is set; nil otherwise.
      def error_message
        return unless @unreachable
        I18n.t("discourse_ai.llm.configuration.model_unreachable")
      end
    end
  end
end

View File

@ -212,22 +212,13 @@ module DiscourseAi
# 1. Persona's default LLM
# 2. SiteSetting.ai_default_llm_model (or newest LLM if not set)
def find_ai_hyde_model(persona_klass)
model_id =
persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider.
model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model
if model_id.present?
LlmModel.find_by(id: model_id)
else
last_model_id = LlmModel.last&.id
# SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model.
if last_model_id.present? && SiteSetting.ai_default_llm_model.empty?
SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider.
end
LlmModel.last
end
end
private

View File

@ -56,19 +56,11 @@ module DiscourseAi
# 1. Persona's default LLM
# 2. SiteSetting.ai_default_llm_model (or newest LLM if not set)
def find_summarization_model(persona_klass)
model_id =
persona_klass.default_llm_id || SiteSetting.ai_default_llm_model&.split(":")&.last # Remove legacy custom provider.
model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model
if model_id.present?
LlmModel.find_by(id: model_id)
else
last_model_id = LlmModel.last&.id
# SiteSetting.ai_default_llm_model shouldn't be empty, but if it is, we set it to the last model.
if last_model_id.present? && SiteSetting.ai_default_llm_model.empty?
SiteSetting.set_and_log("ai_default_llm_model", "custom:#{last_model_id}", Discourse.system_user) # Remove legacy custom provider.
end
LlmModel.last
end
end
@ -79,7 +71,7 @@ module DiscourseAi
persona = persona_klass.new
user = User.find_by(id: persona_klass.user_id) || Discourse.system_user
bot = DiscourseAi::Personas::Bot.as(user, persona: persona, model: llm_model)
DiscourseAi::Personas::Bot.as(user, persona: persona, model: llm_model)
end
end
end

View File

@ -60,9 +60,13 @@ module DiscourseAi
end
def self.preferred_llm_model(persona_klass)
id = persona_klass.default_llm_id || SiteSetting.ai_translation_model&.split(":")&.last
return nil if id.blank?
LlmModel.find_by(id:)
model_id = persona_klass.default_llm_id || SiteSetting.ai_default_llm_model
if model_id.present?
LlmModel.find_by(id: model_id)
else
LlmModel.last
end
end
end
end