discourse-ai/lib/configuration/llm_enumerator.rb
Roman Rizzi 8849caf136
DEV: Transition "Select model" settings to only use LlmModels (#675)
We no longer support the "provider:model" format in the "ai_helper_model" and
"ai_embeddings_semantic_search_hyde_model" settings. We'll migrate existing
values and work with our new data-driven LLM configs from now on.
2024-06-19 18:01:35 -03:00
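
For context, a minimal sketch of the setting-value change this commit describes. The setting name comes from the commit message; the concrete provider/model strings and the LlmModel id are illustrative only:

# Old, no longer supported "provider:model" format (hypothetical value):
#   SiteSetting.ai_helper_model # => "open_ai:gpt-4"
# New data-driven format, pointing at an llm_models row by id (hypothetical value):
#   SiteSetting.ai_helper_model # => "custom:42"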

# frozen_string_literal: true

require "enum_site_setting"

module DiscourseAi
  module Configuration
    # Enumerates configured LlmModel records so "Select model" site settings
    # can offer them as dropdown choices.
    class LlmEnumerator < ::EnumSiteSetting
      def self.valid_value?(val)
        true
      end

      # Returns name/value pairs for every llm_models row, with the value
      # encoded as "custom:<id>".
      def self.values
        values = DB.query_hash(<<~SQL)
          SELECT display_name AS name, id AS value
          FROM llm_models
        SQL

        values.each { |value_h| value_h["value"] = "custom:#{value_h["value"]}" }
        values
      end

      # Model names offered to the AI bot feature.
      def self.available_ai_bots
        %w[
          gpt-3.5-turbo
          gpt-4
          gpt-4-turbo
          gpt-4o
          claude-2
          gemini-1.5-pro
          mixtral-8x7B-Instruct-V0.1
          claude-3-opus
          claude-3-sonnet
          claude-3-haiku
          cohere-command-r-plus
        ]
      end
    end
  end
end
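
A rough usage sketch of what the enumerator yields, assuming a site with two LlmModel rows (the display names and ids below are made up):

  DiscourseAi::Configuration::LlmEnumerator.values
  # => [
  #      { "name" => "GPT-4o", "value" => "custom:1" },
  #      { "name" => "Claude 3 Opus", "value" => "custom:2" },
  #    ]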