From 5cac47a30a11edce65c9c18b3bc6bab034cc55e4 Mon Sep 17 00:00:00 2001
From: Sam
Date: Thu, 21 Mar 2024 11:32:35 +1100
Subject: [PATCH] FIX: unify automation model translation (#540)

report runner and llm triage used different paths to figure out
underlying model name, unify so we use the same path.

fixes claude 3 based models on llm triage
---
 lib/automation.rb               | 17 ++++++++++++++++-
 lib/automation/llm_triage.rb    | 14 ++------------
 lib/automation/report_runner.rb | 19 +++----------------
 3 files changed, 21 insertions(+), 29 deletions(-)

diff --git a/lib/automation.rb b/lib/automation.rb
index 5be5c4d1..02efdce7 100644
--- a/lib/automation.rb
+++ b/lib/automation.rb
@@ -6,10 +6,25 @@ module DiscourseAi
       { id: "gpt-4-turbo", name: "discourse_automation.ai_models.gpt_4_turbo" },
       { id: "gpt-4", name: "discourse_automation.ai_models.gpt_4" },
       { id: "gpt-3.5-turbo", name: "discourse_automation.ai_models.gpt_3_5_turbo" },
-      { id: "claude-2", name: "discourse_automation.ai_models.claude_2" },
       { id: "gemini-pro", name: "discourse_automation.ai_models.gemini_pro" },
+      { id: "claude-2", name: "discourse_automation.ai_models.claude_2" },
       { id: "claude-3-sonnet", name: "discourse_automation.ai_models.claude_3_sonnet" },
       { id: "claude-3-opus", name: "discourse_automation.ai_models.claude_3_opus" },
     ]
+
+    def self.translate_model(model)
+      return "google:gemini-pro" if model == "gemini-pro"
+      return "open_ai:#{model}" if model.start_with? "gpt"
+
+      if model.start_with? "claude"
+        if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(model)
+          return "aws_bedrock:#{model}"
+        else
+          return "anthropic:#{model}"
+        end
+      end
+
+      raise "Unknown model #{model}"
+    end
   end
 end
diff --git a/lib/automation/llm_triage.rb b/lib/automation/llm_triage.rb
index 76e4b846..065fa837 100644
--- a/lib/automation/llm_triage.rb
+++ b/lib/automation/llm_triage.rb
@@ -32,7 +32,8 @@ module DiscourseAi
 
         result = nil
 
-        llm = DiscourseAi::Completions::Llm.proxy(translate_model(model))
+        translated_model = DiscourseAi::Automation.translate_model(model)
+        llm = DiscourseAi::Completions::Llm.proxy(translated_model)
 
         result =
           llm.generate(
@@ -71,17 +72,6 @@ module DiscourseAi
           ReviewablePost.needs_review!(target: post, created_by: Discourse.system_user) if flag_post
         end
       end
-
-      def self.translate_model(model)
-        return "google:gemini-pro" if model == "gemini-pro"
-        return "open_ai:#{model}" if model != "claude-2"
-
-        if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?("claude-2")
-          "aws_bedrock:claude-2"
-        else
-          "anthropic:claude-2"
-        end
-      end
     end
   end
 end
diff --git a/lib/automation/report_runner.rb b/lib/automation/report_runner.rb
index 466f9f7c..ea218ee1 100644
--- a/lib/automation/report_runner.rb
+++ b/lib/automation/report_runner.rb
@@ -65,7 +65,9 @@ module DiscourseAi
             I18n.t("discourse_automation.scriptables.llm_report.title")
           end
         @model = model
-        @llm = DiscourseAi::Completions::Llm.proxy(translate_model(model))
+
+        translated_model = DiscourseAi::Automation.translate_model(model)
+        @llm = DiscourseAi::Completions::Llm.proxy(translated_model)
         @category_ids = category_ids
         @tags = tags
         @allow_secure_categories = allow_secure_categories
@@ -210,21 +212,6 @@ Follow the provided writing composition instructions carefully and precisely ste
         end
       end
 
-      def translate_model(model)
-        return "google:gemini-pro" if model == "gemini-pro"
-        return "open_ai:#{model}" if model.start_with? "gpt"
-
-        if model.start_with? "claude"
-          if DiscourseAi::Completions::Endpoints::AwsBedrock.correctly_configured?(model)
-            return "aws_bedrock:#{model}"
-          else
-            return "anthropic:#{model}"
-          end
-        end
-
-        raise "Unknown model #{model}"
-      end
-
       private
 
       def suppress_notifications(raw)