FIX: unify automation model translation (#540)

The report runner and LLM triage used different code paths to work out the
underlying model name; unify them so both go through the same path.

Fixes Claude 3 based models in LLM triage.
Sam 2024-03-21 11:32:35 +11:00 committed by GitHub
parent e8b2a200c1
commit 5cac47a30a
3 changed files with 21 additions and 29 deletions
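
Both entry points now resolve the automation model through the shared helper before constructing the LLM proxy. A minimal sketch of the unified path as used from inside the plugin (it mirrors the call sites changed below; not a standalone script):

translated_model = DiscourseAi::Automation.translate_model(model)
llm = DiscourseAi::Completions::Llm.proxy(translated_model)
# llm.generate(...) then proceeds as before in both the triage and the report runner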

@@ -6,10 +6,25 @@ module DiscourseAi
       { id: "gpt-4-turbo", name: "discourse_automation.ai_models.gpt_4_turbo" },
       { id: "gpt-4", name: "discourse_automation.ai_models.gpt_4" },
       { id: "gpt-3.5-turbo", name: "discourse_automation.ai_models.gpt_3_5_turbo" },
-      { id: "claude-2", name: "discourse_automation.ai_models.claude_2" },
       { id: "gemini-pro", name: "discourse_automation.ai_models.gemini_pro" },
+      { id: "claude-2", name: "discourse_automation.ai_models.claude_2" },
+      { id: "claude-3-sonnet", name: "discourse_automation.ai_models.claude_3_sonnet" },
+      { id: "claude-3-opus", name: "discourse_automation.ai_models.claude_3_opus" },
     ]
+    def self.translate_model(model)
+      return "google:gemini-pro" if model == "gemini-pro"
+      return "open_ai:#{model}" if model.start_with? "gpt"
+      if model.start_with? "claude"
+        if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(model)
+          return "aws_bedrock:#{model}"
+        else
+          return "anthropic:#{model}"
+        end
+      end
+      raise "Unknown model #{model}"
+    end
   end
 end
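
For context, translate_model maps an automation model id to the "provider:model" string that DiscourseAi::Completions::Llm.proxy expects. Assuming AWS Bedrock is not configured for the model in question, the helper above resolves ids roughly as follows (illustrative values derived from the code in this hunk):

DiscourseAi::Automation.translate_model("gpt-4-turbo")   # => "open_ai:gpt-4-turbo"
DiscourseAi::Automation.translate_model("gemini-pro")    # => "google:gemini-pro"
DiscourseAi::Automation.translate_model("claude-3-opus") # => "anthropic:claude-3-opus"
# With AwsBedrock.correctly_configured?("claude-3-opus") returning true, the claude
# call would return "aws_bedrock:claude-3-opus" instead; unknown ids raise an error.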

@@ -32,7 +32,8 @@ module DiscourseAi
         result = nil
-        llm = DiscourseAi::Completions::Llm.proxy(translate_model(model))
+        translated_model = DiscourseAi::Automation.translate_model(model)
+        llm = DiscourseAi::Completions::Llm.proxy(translated_model)
         result =
           llm.generate(
@@ -71,17 +72,6 @@ module DiscourseAi
           ReviewablePost.needs_review!(target: post, created_by: Discourse.system_user) if flag_post
         end
       end
-      def self.translate_model(model)
-        return "google:gemini-pro" if model == "gemini-pro"
-        return "open_ai:#{model}" if model != "claude-2"
-        if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-2")
-          "aws_bedrock:claude-2"
-        else
-          "anthropic:claude-2"
-        end
-      end
     end
   end
 end

@@ -65,7 +65,9 @@ module DiscourseAi
           I18n.t("discourse_automation.scriptables.llm_report.title")
         end
       @model = model
-      @llm = DiscourseAi::Completions::Llm.proxy(translate_model(model))
+      translated_model = DiscourseAi::Automation.translate_model(model)
+      @llm = DiscourseAi::Completions::Llm.proxy(translated_model)
       @category_ids = category_ids
       @tags = tags
       @allow_secure_categories = allow_secure_categories
@@ -210,21 +212,6 @@ Follow the provided writing composition instructions carefully and precisely ste
         end
       end
-      def translate_model(model)
-        return "google:gemini-pro" if model == "gemini-pro"
-        return "open_ai:#{model}" if model.start_with? "gpt"
-        if model.start_with? "claude"
-          if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(model)
-            return "aws_bedrock:#{model}"
-          else
-            return "anthropic:#{model}"
-          end
-        end
-        raise "Unknown model #{model}"
-      end
       private
       def suppress_notifications(raw)