DEV: Use full URL for problem check message (#1165)

Better to construct the URL in Ruby and pass it to I18n, so we don't have to mess with the translations if the URL changes.
This commit is contained in:
Ted Johansson 2025-03-05 11:31:23 +08:00 committed by GitHub
parent fff0bc0f8c
commit 584f5f2b6e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 18 additions and 14 deletions

View File

@@ -8,10 +8,6 @@ class ProblemCheck::AiLlmStatus < ProblemCheck
llm_errors llm_errors
end end
def base_path
Discourse.base_path
end
private private
def llm_errors def llm_errors
@@ -26,20 +22,21 @@ class ProblemCheck::AiLlmStatus < ProblemCheck
blk.call blk.call
nil nil
rescue => e rescue => e
error_message = parse_error_message(e.message) details = {
message = model_id: model.id,
"#{I18n.t("dashboard.problem.ai_llm_status", { base_path: base_path, model_name: model.display_name, model_id: model.id })}" model_name: model.display_name,
error: parse_error_message(e.message),
url: "#{Discourse.base_path}/admin/plugins/discourse-ai/ai-llms/#{model.id}/edit",
}
message = I18n.t("dashboard.problem.ai_llm_status", details)
Problem.new( Problem.new(
message, message,
priority: "high", priority: "high",
identifier: "ai_llm_status", identifier: "ai_llm_status",
target: model.id, target: model.id,
details: { details:,
model_id: model.id,
model_name: model.display_name,
error: error_message,
},
) )
end end
end end

View File

@@ -484,4 +484,4 @@ en:
prompt_message_length: The message %{idx} is over the 1000 character limit. prompt_message_length: The message %{idx} is over the 1000 character limit.
dashboard: dashboard:
problem: problem:
ai_llm_status: "The LLM model: %{model_name} is encountering issues. Please check the <a href='%{base_path}/admin/plugins/discourse-ai/ai-llms/%{model_id}/edit'>model's configuration page</a>." ai_llm_status: "The LLM model: %{model_name} is encountering issues. Please check the <a href='%{url}'>model's configuration page</a>."

View File

@@ -42,7 +42,13 @@ RSpec.describe ProblemCheck::AiLlmStatus do
it "returns a problem with an LLM model" do it "returns a problem with an LLM model" do
stub_request(:post, post_url).to_return(status: 403, body: error_response, headers: {}) stub_request(:post, post_url).to_return(status: 403, body: error_response, headers: {})
message = message =
"#{I18n.t("dashboard.problem.ai_llm_status", { base_path: Discourse.base_path, model_name: llm_model.display_name, model_id: llm_model.id })}" I18n.t(
"dashboard.problem.ai_llm_status",
{
model_name: llm_model.display_name,
url: "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}/edit",
},
)
expect(described_class.new.call).to contain_exactly( expect(described_class.new.call).to contain_exactly(
have_attributes( have_attributes(
@@ -53,6 +59,7 @@ RSpec.describe ProblemCheck::AiLlmStatus do
details: { details: {
model_id: llm_model.id, model_id: llm_model.id,
model_name: llm_model.display_name, model_name: llm_model.display_name,
url: "/admin/plugins/discourse-ai/ai-llms/#{llm_model.id}/edit",
error: JSON.parse(error_response)["message"], error: JSON.parse(error_response)["message"],
}, },
), ),