discourse-ai/app/serializers/llm_model_serializer.rb
Keegan George d26c7ac48d
FEATURE: Add spending metrics to AI usage (#1268)
This update adds metrics for estimated spending in AI usage. To make use of it, admins must add cost details to the LLM config page (input, output, and cached input costs per 1M tokens). After doing so, the metrics will appear in the AI usage dashboard as the AI plugin is used.
2025-04-17 15:09:48 -07:00

54 lines
1.3 KiB
Ruby

# frozen_string_literal: true
# Serializes an LlmModel record for the admin LLM configuration UI.
#
# TODO: we probably should rename the table LlmModel to AiLlm so it is
# consistent with AiPersona and AiTool; "LLM model" is a confusing name
# given that "large language model model" is redundant.
class LlmModelSerializer < ApplicationSerializer
  root "ai_llm"

  attributes :id,
             :display_name,
             :name,
             :provider,
             :max_prompt_tokens,
             :max_output_tokens,
             :tokenizer,
             :api_key,
             :url,
             :enabled_chat_bot,
             :provider_params,
             :vision_enabled,
             :input_cost,
             :output_cost,
             :cached_input_cost,
             :used_by

  has_one :user, serializer: BasicUserSerializer, embed: :object
  has_many :llm_quotas, serializer: LlmQuotaSerializer, embed: :objects

  # Features currently using this model, looked up by the model's id.
  # Prefers a usage map precomputed by the caller into scope[:llm_usage];
  # otherwise falls back to enumerating global usage on demand.
  def used_by
    usage_map =
      (scope && scope[:llm_usage]) || DiscourseAi::Configuration::LlmEnumerator.global_usage
    usage_map[object.id]
  end

  # Seeded (bundled) models never expose their real credentials or endpoint.
  def api_key
    redact_if_seeded(object.api_key)
  end

  def url
    redact_if_seeded(object.url)
  end

  # Seeded models report the vendor label "CDCK" instead of the raw provider.
  def provider
    object.seeded? ? "CDCK" : object.provider
  end

  private

  # Returns a masked placeholder for seeded models, the raw value otherwise.
  def redact_if_seeded(value)
    object.seeded? ? "********" : value
  end
end