FEATURE: AI Usage page (#964)

- Added a new admin interface to track AI usage metrics, including tokens, features, and models.
- Introduced a new route `/admin/plugins/discourse-ai/ai-usage` and a supporting API endpoint in `AiUsageController` (a request sketch follows just before the file diffs).
- Implemented `AiUsageSerializer` for structuring AI usage data.
- Added CSS styling for charts and tables in `stylesheets/modules/llms/common/usage.scss`.
- Extended the `AiApiAuditLog` model with a `cached_tokens` column (currently populated only for OpenAI), plus the corresponding DB migration and indexes.
- Created a `Report` module for efficient aggregation and filtering of AI usage metrics (a usage sketch follows the `Report` class diff below).
- Updated AI Bot title generation so the LLM call is logged against the requesting user rather than the bot user.
- Extended test coverage for the new tracking features, ensuring data consistency and access controls.
Author: Sam · 2024-11-29 06:26:48 +11:00 · committed by GitHub
commit bc0657f478 (parent c980c34d77)
22 changed files with 1108 additions and 9 deletions
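For illustration, here is a minimal, hedged sketch of how the new endpoint could be queried outside the admin UI. Only the route and its `start_date`/`end_date`/`feature`/`model` params come from this commit; the host, API key, username, and the use of the standard `Api-Key`/`Api-Username` Discourse API headers are assumptions.

```ruby
# Hedged sketch, not part of this commit: fetch the AI usage report over the
# Discourse API. The host, key, and username below are placeholders.
require "net/http"
require "json"
require "uri"

uri = URI("https://forum.example.com/admin/plugins/discourse-ai/ai-usage.json")
uri.query = URI.encode_www_form(
  start_date: "2024-11-01",
  end_date: "2024-11-29",
  feature: "summarize", # optional; omit to aggregate across all features
)

request = Net::HTTP::Get.new(uri)
request["Api-Key"] = "<admin-api-key>" # placeholder
request["Api-Username"] = "system"     # placeholder

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
report = JSON.parse(response.body)

puts report["summary"]["total_requests"]
puts report["period"] # "hour", "day", or "month", as guessed by the backend
```

The keys read here are produced by `AiUsageSerializer` further down in the diff.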


@ -0,0 +1,11 @@
import { service } from "@ember/service";
import { ajax } from "discourse/lib/ajax";
import DiscourseRoute from "discourse/routes/discourse";
export default class DiscourseAiUsageRoute extends DiscourseRoute {
@service store;
model() {
return ajax("/admin/plugins/discourse-ai/ai-usage.json");
}
}


@ -0,0 +1 @@
<AiUsage @model={{this.model}} />


@ -0,0 +1,27 @@
# frozen_string_literal: true
module DiscourseAi
module Admin
class AiUsageController < ::Admin::AdminController
requires_plugin "discourse-ai"
def show
render json: AiUsageSerializer.new(create_report, root: false)
end
private
def create_report
report =
DiscourseAi::Completions::Report.new(
start_date: params[:start_date]&.to_date || 30.days.ago,
end_date: params[:end_date]&.to_date || Time.current,
)
report = report.filter_by_feature(params[:feature]) if params[:feature].present?
report = report.filter_by_model(params[:model]) if params[:model].present?
report
end
end
end
end


@ -3,6 +3,7 @@
class AiApiAuditLog < ActiveRecord::Base
belongs_to :post
belongs_to :topic
belongs_to :user
module Provider
OpenAI = 1
@ -43,3 +44,10 @@ end
# feature_name :string(255)
# language_model :string(255)
# feature_context :jsonb
# cached_tokens :integer
#
# Indexes
#
# index_ai_api_audit_logs_on_created_at_and_feature_name (created_at,feature_name)
# index_ai_api_audit_logs_on_created_at_and_language_model (created_at,language_model)
#


@ -0,0 +1,69 @@
# frozen_string_literal: true
class AiUsageSerializer < ApplicationSerializer
attributes :data, :features, :models, :users, :summary, :period
def data
object.tokens_by_period.as_json(
only: %i[period total_tokens total_cached_tokens total_request_tokens total_response_tokens],
)
end
def period
object.guess_period
end
def features
object.feature_breakdown.as_json(
only: %i[
feature_name
usage_count
total_tokens
total_cached_tokens
total_request_tokens
total_response_tokens
],
)
end
def models
object.model_breakdown.as_json(
only: %i[
llm
usage_count
total_tokens
total_cached_tokens
total_request_tokens
total_response_tokens
],
)
end
def users
object.user_breakdown.map do |user|
{
avatar_template: User.avatar_template(user.username, user.uploaded_avatar_id),
username: user.username,
usage_count: user.usage_count,
total_tokens: user.total_tokens,
total_cached_tokens: user.total_cached_tokens,
total_request_tokens: user.total_request_tokens,
total_response_tokens: user.total_response_tokens,
}
end
end
def summary
{
total_tokens: object.total_tokens,
total_cached_tokens: object.total_cached_tokens,
total_request_tokens: object.total_request_tokens,
total_response_tokens: object.total_response_tokens,
total_requests: object.total_requests,
date_range: {
start: object.start_date,
end: object.end_date,
},
}
end
end
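For reference, a hedged sketch of the JSON this serializer produces. The keys mirror the attributes above; the values, and the trimming of breakdown rows to a few fields, are purely illustrative.

```ruby
# Illustrative only: rough shape of the ai-usage.json response.
{
  data: [
    {
      period: "2024-11-28T00:00:00.000Z",
      total_tokens: 150,
      total_cached_tokens: 0,
      total_request_tokens: 100,
      total_response_tokens: 50,
    },
  ],
  period: "day",
  features: [{ feature_name: "summarize", usage_count: 1, total_tokens: 150 }],
  models: [{ llm: "gpt-4", usage_count: 1, total_tokens: 150 }],
  users: [{ username: "sam", avatar_template: "<avatar path>", usage_count: 1, total_tokens: 150 }],
  summary: {
    total_tokens: 150,
    total_cached_tokens: 0,
    total_request_tokens: 100,
    total_response_tokens: 50,
    total_requests: 1,
    date_range: { start: "2024-10-30", end: "2024-11-29" },
  },
}
```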


@ -18,5 +18,6 @@ export default {
this.route("new");
this.route("show", { path: "/:id" });
});
this.route("discourse-ai-usage", { path: "ai-usage" });
},
};


@ -0,0 +1,484 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { fn, hash } from "@ember/helper";
import { on } from "@ember/modifier";
import { action } from "@ember/object";
import { LinkTo } from "@ember/routing";
import { service } from "@ember/service";
import { eq } from "truth-helpers";
import DateTimeInputRange from "discourse/components/date-time-input-range";
import avatar from "discourse/helpers/avatar";
import { ajax } from "discourse/lib/ajax";
import i18n from "discourse-common/helpers/i18n";
import Chart from "admin/components/chart";
import ComboBox from "select-kit/components/combo-box";
export default class AiUsage extends Component {
@service store;
@tracked startDate = moment().subtract(30, "days").toDate();
@tracked endDate = new Date();
@tracked data = this.args.model;
@tracked selectedFeature;
@tracked selectedModel;
@tracked selectedPeriod = "month";
@tracked isCustomDateActive = false;
@action
async fetchData() {
const response = await ajax("/admin/plugins/discourse-ai/ai-usage.json", {
data: {
start_date: moment(this.startDate).format("YYYY-MM-DD"),
end_date: moment(this.endDate).format("YYYY-MM-DD"),
feature: this.selectedFeature,
model: this.selectedModel,
},
});
this.data = response;
}
@action
async onFilterChange() {
await this.fetchData();
}
@action
onFeatureChanged(value) {
this.selectedFeature = value;
this.onFilterChange();
}
@action
onModelChanged(value) {
this.selectedModel = value;
this.onFilterChange();
}
normalizeTimeSeriesData(data) {
if (!data?.length) {
return [];
}
const startDate = moment(this.startDate);
const endDate = moment(this.endDate);
const normalized = [];
let interval;
let format;
if (this.data.period === "hour") {
interval = "hour";
format = "YYYY-MM-DD HH:00:00";
} else if (this.data.period === "day") {
interval = "day";
format = "YYYY-MM-DD";
} else {
interval = "month";
format = "YYYY-MM";
}
const dataMap = new Map(
data.map((d) => [moment(d.period).format(format), d])
);
for (
let m = moment(startDate);
m.isSameOrBefore(endDate);
m.add(1, interval)
) {
const dateKey = m.format(format);
const existingData = dataMap.get(dateKey);
normalized.push(
existingData || {
period: m.format(),
total_tokens: 0,
total_cached_tokens: 0,
total_request_tokens: 0,
total_response_tokens: 0,
}
);
}
return normalized;
}
get chartConfig() {
if (!this.data?.data) {
return;
}
const normalizedData = this.normalizeTimeSeriesData(this.data.data);
const chartEl = document.querySelector(".ai-usage__chart");
const computedStyle = getComputedStyle(chartEl);
const colors = {
response: computedStyle.getPropertyValue("--chart-response-color").trim(),
request: computedStyle.getPropertyValue("--chart-request-color").trim(),
cached: computedStyle.getPropertyValue("--chart-cached-color").trim(),
};
return {
type: "bar",
data: {
labels: normalizedData.map((row) => {
const date = moment(row.period);
if (this.data.period === "hour") {
return date.format("HH:00");
} else if (this.data.period === "day") {
return date.format("DD-MMM");
} else {
return date.format("MMM-YY");
}
}),
datasets: [
{
label: "Response Tokens",
data: normalizedData.map((row) => row.total_response_tokens),
backgroundColor: colors.response,
},
{
label: "Net Request Tokens",
data: normalizedData.map(
(row) => row.total_request_tokens - row.total_cached_tokens
),
backgroundColor: colors.request,
},
{
label: "Cached Request Tokens",
data: normalizedData.map((row) => row.total_cached_tokens),
backgroundColor: colors.cached,
},
],
},
options: {
responsive: true,
scales: {
x: {
stacked: true,
},
y: {
stacked: true,
beginAtZero: true,
},
},
},
};
}
get availableFeatures() {
// cache the list so changing the selected feature/model doesn't reshuffle
// the dropdown options; they only refresh when the duration changes
this._cachedFeatures =
this._cachedFeatures ||
(this.data?.features || []).map((f) => ({
id: f.feature_name,
name: f.feature_name,
}));
return this._cachedFeatures;
}
get availableModels() {
this._cachedModels =
this._cachedModels ||
(this.data?.models || []).map((m) => ({
id: m.llm,
name: m.llm,
}));
return this._cachedModels;
}
get periodOptions() {
return [
{ id: "day", name: "Last 24 Hours" },
{ id: "week", name: "Last Week" },
{ id: "month", name: "Last Month" },
];
}
@action
setPeriodDates(period) {
const now = moment();
switch (period) {
case "day":
this.startDate = now.clone().subtract(1, "day").toDate();
this.endDate = now.toDate();
break;
case "week":
this.startDate = now.clone().subtract(7, "days").toDate();
this.endDate = now.toDate();
break;
case "month":
this.startDate = now.clone().subtract(30, "days").toDate();
this.endDate = now.toDate();
break;
}
}
@action
onPeriodSelect(period) {
this.selectedPeriod = period;
this.isCustomDateActive = false;
this.setPeriodDates(period);
this.fetchData();
}
@action
onCustomDateClick() {
this.isCustomDateActive = !this.isCustomDateActive;
if (this.isCustomDateActive) {
this.selectedPeriod = null;
}
}
@action
onDateChange() {
this.isCustomDateActive = true;
this.selectedPeriod = null;
this.fetchData();
}
@action
onChangeDateRange({ from, to }) {
this._startDate = from;
this._endDate = to;
}
@action
onRefreshDateRange() {
this.startDate = this._startDate;
this.endDate = this._endDate;
this.fetchData();
}
<template>
<div class="ai-usage">
<div class="ai-usage__filters">
<div class="ai-usage__filters-dates">
<div class="ai-usage__period-buttons">
{{#each this.periodOptions as |option|}}
<button
type="button"
class="btn
{{if
(eq this.selectedPeriod option.id)
'btn-primary'
'btn-default'
}}"
{{on "click" (fn this.onPeriodSelect option.id)}}
>
{{option.name}}
</button>
{{/each}}
<button
type="button"
class="btn
{{if this.isCustomDateActive 'btn-primary' 'btn-default'}}"
{{on "click" this.onCustomDateClick}}
>
Custom...
</button>
</div>
{{#if this.isCustomDateActive}}
<div class="ai-usage__custom-date-pickers">
<DateTimeInputRange
@from={{this.startDate}}
@to={{this.endDate}}
@onChange={{this.onChangeDateRange}}
@showFromTime={{false}}
@showToTime={{false}}
/>
<button
type="button"
class="btn btn-default"
{{on "click" this.onRefreshDateRange}}
>
{{i18n "refresh"}}
</button>
</div>
{{/if}}
</div>
<div class="ai-usage__filters-row">
<ComboBox
@value={{this.selectedFeature}}
@content={{this.availableFeatures}}
@onChange={{this.onFeatureChanged}}
@options={{hash none="discourse_ai.usage.all_features"}}
class="ai-usage__feature-selector"
/>
<ComboBox
@value={{this.selectedModel}}
@content={{this.availableModels}}
@onChange={{this.onModelChanged}}
@options={{hash none="discourse_ai.usage.all_models"}}
class="ai-usage__model-selector"
/>
</div>
{{#if this.data}}
<div class="ai-usage__summary">
<h3 class="ai-usage__summary-title">
{{i18n "discourse_ai.usage.summary"}}
</h3>
<div class="ai-usage__summary-stats">
<div class="ai-usage__summary-stat">
<span class="label">{{i18n
"discourse_ai.usage.total_requests"
}}</span>
<span class="value">{{this.data.summary.total_requests}}</span>
</div>
<div class="ai-usage__summary-stat">
<span class="label">{{i18n
"discourse_ai.usage.total_tokens"
}}</span>
<span class="value">{{this.data.summary.total_tokens}}</span>
</div>
<div class="ai-usage__summary-stat">
<span class="label">{{i18n
"discourse_ai.usage.request_tokens"
}}</span>
<span
class="value"
>{{this.data.summary.total_request_tokens}}</span>
</div>
<div class="ai-usage__summary-stat">
<span class="label">{{i18n
"discourse_ai.usage.response_tokens"
}}</span>
<span
class="value"
>{{this.data.summary.total_response_tokens}}</span>
</div>
<div class="ai-usage__summary-stat">
<span class="label">{{i18n
"discourse_ai.usage.cached_tokens"
}}</span>
<span
class="value"
>{{this.data.summary.total_cached_tokens}}</span>
</div>
</div>
</div>
<div class="ai-usage__charts">
<div class="ai-usage__chart-container">
<h3 class="ai-usage__chart-title">
{{i18n "discourse_ai.usage.tokens_over_time"}}
</h3>
<Chart
@chartConfig={{this.chartConfig}}
class="ai-usage__chart"
/>
</div>
<div class="ai-usage__breakdowns">
<div class="ai-usage__users">
<h3 class="ai-usage__users-title">
{{i18n "discourse_ai.usage.users_breakdown"}}
</h3>
<table class="ai-usage__users-table">
<thead>
<tr>
<th>{{i18n "discourse_ai.usage.username"}}</th>
<th>{{i18n "discourse_ai.usage.usage_count"}}</th>
<th>{{i18n "discourse_ai.usage.total_tokens"}}</th>
</tr>
</thead>
<tbody>
{{#each this.data.users as |user|}}
<tr class="ai-usage__users-row">
<td class="ai-usage__users-cell">
<div class="user-info">
<LinkTo
@route="user"
@model={{user.username}}
class="username"
>
{{avatar user imageSize="tiny"}}
{{user.username}}
</LinkTo>
</div></td>
<td
class="ai-usage__users-cell"
>{{user.usage_count}}</td>
<td
class="ai-usage__users-cell"
>{{user.total_tokens}}</td>
</tr>
{{/each}}
</tbody>
</table>
</div>
<div class="ai-usage__features">
<h3 class="ai-usage__features-title">
{{i18n "discourse_ai.usage.features_breakdown"}}
</h3>
<table class="ai-usage__features-table">
<thead>
<tr>
<th>{{i18n "discourse_ai.usage.feature"}}</th>
<th>{{i18n "discourse_ai.usage.usage_count"}}</th>
<th>{{i18n "discourse_ai.usage.total_tokens"}}</th>
</tr>
</thead>
<tbody>
{{#each this.data.features as |feature|}}
<tr class="ai-usage__features-row">
<td
class="ai-usage__features-cell"
>{{feature.feature_name}}</td>
<td
class="ai-usage__features-cell"
>{{feature.usage_count}}</td>
<td
class="ai-usage__features-cell"
>{{feature.total_tokens}}</td>
</tr>
{{/each}}
</tbody>
</table>
</div>
<div class="ai-usage__models">
<h3 class="ai-usage__models-title">
{{i18n "discourse_ai.usage.models_breakdown"}}
</h3>
<table class="ai-usage__models-table">
<thead>
<tr>
<th>{{i18n "discourse_ai.usage.model"}}</th>
<th>{{i18n "discourse_ai.usage.usage_count"}}</th>
<th>{{i18n "discourse_ai.usage.total_tokens"}}</th>
</tr>
</thead>
<tbody>
{{#each this.data.models as |model|}}
<tr class="ai-usage__models-row">
<td class="ai-usage__models-cell">{{model.llm}}</td>
<td
class="ai-usage__models-cell"
>{{model.usage_count}}</td>
<td
class="ai-usage__models-cell"
>{{model.total_tokens}}</td>
</tr>
{{/each}}
</tbody>
</table>
</div>
</div>
</div>
{{/if}}
</div>
</div>
</template>
}


@ -24,6 +24,10 @@ export default {
label: "discourse_ai.tools.short_title",
route: "adminPlugins.show.discourse-ai-tools",
},
{
label: "discourse_ai.usage.short_title",
route: "adminPlugins.show.discourse-ai-usage",
},
]);
});
},


@ -0,0 +1,168 @@
.ai-usage {
--chart-response-color: rgba(75, 192, 192, 0.8);
--chart-request-color: rgba(153, 102, 255, 0.8);
--chart-cached-color: rgba(153, 102, 255, 0.4);
padding: 1em;
&__filters-dates {
display: flex;
flex-direction: column;
gap: 1em;
margin-bottom: 1em;
}
&__period-buttons {
display: flex;
gap: 0.5em;
align-items: center;
.btn {
padding: 0.5em 1em;
&.btn-primary {
background: var(--tertiary);
color: var(--secondary);
}
}
}
&__custom-date-pickers {
display: flex;
gap: 1em;
align-items: center;
margin-top: 0.5em;
}
&__filters {
margin-bottom: 2em;
}
&__filters-period {
display: flex;
align-items: center;
gap: 1em;
}
.d-date-time-input-range {
display: flex;
gap: 1em;
align-items: center;
}
.d-date-time-input-range .from {
margin: 0;
}
&__period-label {
font-weight: bold;
}
&__summary {
margin: 2em 0;
padding: 1.5em;
background: var(--primary-very-low);
border-radius: 0.5em;
}
&__summary-title {
margin-bottom: 1em;
color: var(--primary);
font-size: 1.2em;
}
&__summary-stats {
display: grid;
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
gap: 1em;
}
&__summary-stat {
display: flex;
flex-direction: column;
padding: 1em;
background: var(--secondary);
border-radius: 0.25em;
.label {
color: var(--primary-medium);
font-size: 0.875em;
margin-bottom: 0.5em;
}
.value {
color: var(--primary);
font-size: 1.5em;
font-weight: bold;
}
}
&__charts {
margin-top: 2em;
}
&__chart {
position: relative;
}
&__chart-container {
margin-bottom: 2em;
}
&__chart-title {
margin-bottom: 1em;
}
&__breakdowns {
display: grid;
grid-template-columns: 1fr 1fr;
gap: 2em;
margin-top: 2em;
@media (max-width: 768px) {
grid-template-columns: 1fr;
}
}
&__features,
&__users,
&__models {
background: var(--primary-very-low);
padding: 1em;
border-radius: 0.5em;
}
&__features-title,
&__users-title,
&__models-title {
margin-bottom: 1em;
}
&__features-table,
&__users-table,
&__models-table {
width: 100%;
border-collapse: collapse;
th {
text-align: left;
padding: 0.5em;
border-bottom: 2px solid var(--primary-low);
}
}
&__features-row,
&__users-row,
&__models-row {
&:hover {
background: var(--primary-low);
}
}
&__features-cell,
&__users-cell,
&__models-cell {
padding: 0.5em;
border-bottom: 1px solid var(--primary-low);
}
}


@ -126,6 +126,26 @@ en:
modals:
select_option: "Select an option..."
usage:
short_title: "Usage"
summary: "Summary"
total_tokens: "Total tokens"
tokens_over_time: "Tokens over time"
features_breakdown: "Usage per feature"
feature: "Feature"
usage_count: "Usage count"
model: "Model"
models_breakdown: "Usage per model"
users_breakdown: "Usage per user"
all_features: "All features"
all_models: "All models"
username: "Username"
total_requests: "Total requests"
request_tokens: "Request tokens"
response_tokens: "Response tokens"
cached_tokens: "Cached tokens"
ai_persona:
tool_strategies:
all: "Apply to all replies"


@ -77,6 +77,8 @@ Discourse::Application.routes.draw do
get "/rag-document-fragments/files/status",
to: "discourse_ai/admin/rag_document_fragments#indexing_status_check"
get "/ai-usage", to: "discourse_ai/admin/ai_usage#show"
resources :ai_llms,
only: %i[index create show update destroy],
path: "ai-llms",


@ -0,0 +1,9 @@
# frozen_string_literal: true
class AddCachedTokensToAiApiAuditLog < ActiveRecord::Migration[7.2]
def change
add_column :ai_api_audit_logs, :cached_tokens, :integer
add_index :ai_api_audit_logs, %i[created_at feature_name]
add_index :ai_api_audit_logs, %i[created_at language_model]
end
end
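The two composite indexes match how the usage report reads this table: constrain `created_at` to a date range, then group or filter on `feature_name` or `language_model`. A hedged sketch of that access pattern (not code from this commit; it mirrors the `Report` class further down):

```ruby
# Roughly the query shape served by the new (created_at, feature_name) and
# (created_at, language_model) indexes.
AiApiAuditLog
  .where(created_at: 30.days.ago..Time.current)
  .where(feature_name: "summarize") # or .where(language_model: "gpt-4")
  .group(:feature_name)
  .sum("request_tokens + response_tokens")
```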


@ -22,7 +22,7 @@ module DiscourseAi
attr_reader :bot_user
attr_accessor :persona
- def get_updated_title(conversation_context, post)
+ def get_updated_title(conversation_context, post, user)
system_insts = <<~TEXT.strip
You are titlebot. Given a conversation, you will suggest a title.
@ -61,7 +61,7 @@ module DiscourseAi
DiscourseAi::Completions::Llm
.proxy(model)
- .generate(title_prompt, user: post.user, feature_name: "bot_title")
+ .generate(title_prompt, user: user, feature_name: "bot_title")
.strip
.split("\n")
.last


@ -245,11 +245,11 @@ module DiscourseAi
builder.to_a
end
- def title_playground(post)
+ def title_playground(post, user)
context = conversation_context(post)
bot
- .get_updated_title(context, post)
+ .get_updated_title(context, post, user)
.tap do |new_title|
PostRevisor.new(post.topic.first_post, post.topic).revise!(
bot.bot_user,
@ -544,7 +544,7 @@ module DiscourseAi
post_streamer&.finish(skip_callback: true)
publish_final_update(reply_post) if stream_reply
if reply_post && post.post_number == 1 && post.topic.private_message?
- title_playground(reply_post)
+ title_playground(reply_post, post.user)
end
end


@ -97,6 +97,7 @@ module DiscourseAi
def final_log_update(log)
log.request_tokens = processor.prompt_tokens if processor.prompt_tokens
log.response_tokens = processor.completion_tokens if processor.completion_tokens
log.cached_tokens = processor.cached_tokens if processor.cached_tokens
end
def decode(response_raw)


@ -1,13 +1,14 @@
# frozen_string_literal: true
module DiscourseAi::Completions
class OpenAiMessageProcessor
- attr_reader :prompt_tokens, :completion_tokens
+ attr_reader :prompt_tokens, :completion_tokens, :cached_tokens
def initialize(partial_tool_calls: false)
@tool = nil
@tool_arguments = +""
@prompt_tokens = nil
@completion_tokens = nil
@cached_tokens = nil
@partial_tool_calls = partial_tool_calls
end
@ -121,6 +122,7 @@ module DiscourseAi::Completions
def update_usage(json)
@prompt_tokens ||= json.dig(:usage, :prompt_tokens)
@completion_tokens ||= json.dig(:usage, :completion_tokens)
@cached_tokens ||= json.dig(:usage, :prompt_tokens_details, :cached_tokens)
end
end
end
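To make the `cached_tokens` plumbing concrete, here is a hedged sketch of the usage chunk OpenAI streams back (numbers borrowed from the updated spec fixture below, and assuming the chunk has been parsed with symbol keys); `update_usage` digs `prompt_tokens_details.cached_tokens` out of it.

```ruby
# Illustrative usage chunk, as if parsed with JSON.parse(raw, symbolize_names: true).
chunk = {
  usage: {
    prompt_tokens: 735,
    completion_tokens: 156,
    total_tokens: 891,
    prompt_tokens_details: { cached_tokens: 33, audio_tokens: 0 },
  },
}

chunk.dig(:usage, :prompt_tokens)                         # => 735
chunk.dig(:usage, :completion_tokens)                     # => 156
chunk.dig(:usage, :prompt_tokens_details, :cached_tokens) # => 33
```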

lib/completions/report.rb (new file, 148 lines)

@ -0,0 +1,148 @@
# frozen_string_literal: true
module DiscourseAi
module Completions
class Report
UNKNOWN_FEATURE = "unknown"
USER_LIMIT = 50
attr_reader :start_date, :end_date, :base_query
def initialize(start_date: 30.days.ago, end_date: Time.current)
@start_date = start_date.beginning_of_day
@end_date = end_date.end_of_day
@base_query = AiApiAuditLog.where(created_at: @start_date..@end_date)
end
def total_tokens
stats.total_tokens
end
def total_cached_tokens
stats.total_cached_tokens
end
def total_request_tokens
stats.total_request_tokens
end
def total_response_tokens
stats.total_response_tokens
end
def total_requests
stats.total_requests
end
def stats
@stats ||=
base_query.select(
"COUNT(*) as total_requests",
"SUM(request_tokens + response_tokens) as total_tokens",
"SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
"SUM(request_tokens) as total_request_tokens",
"SUM(response_tokens) as total_response_tokens",
)[
0
]
end
def guess_period(period = nil)
# accept only the known granularities; otherwise guess from the date span
period = nil if %i[day month hour].exclude?(period)
period ||
case @end_date - @start_date
when 0..3.days
:hour
when 3.days..90.days
:day
else
:month
end
end
def tokens_by_period(period = nil)
period = guess_period(period)
base_query
.group("DATE_TRUNC('#{period}', created_at)")
.order("DATE_TRUNC('#{period}', created_at)")
.select(
"DATE_TRUNC('#{period}', created_at) as period",
"SUM(request_tokens + response_tokens) as total_tokens",
"SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
"SUM(request_tokens) as total_request_tokens",
"SUM(response_tokens) as total_response_tokens",
)
end
def user_breakdown
base_query
.joins(:user)
.group(:user_id, "users.username", "users.uploaded_avatar_id")
.order("usage_count DESC")
.limit(USER_LIMIT)
.select(
"users.username",
"users.uploaded_avatar_id",
"COUNT(*) as usage_count",
"SUM(request_tokens + response_tokens) as total_tokens",
"SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
"SUM(request_tokens) as total_request_tokens",
"SUM(response_tokens) as total_response_tokens",
)
end
def feature_breakdown
base_query
.group(:feature_name)
.order("usage_count DESC")
.select(
"case when coalesce(feature_name, '') = '' then '#{UNKNOWN_FEATURE}' else feature_name end as feature_name",
"COUNT(*) as usage_count",
"SUM(request_tokens + response_tokens) as total_tokens",
"SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
"SUM(request_tokens) as total_request_tokens",
"SUM(response_tokens) as total_response_tokens",
)
end
def model_breakdown
base_query
.group(:language_model)
.order("usage_count DESC")
.select(
"language_model as llm",
"COUNT(*) as usage_count",
"SUM(request_tokens + response_tokens) as total_tokens",
"SUM(COALESCE(cached_tokens,0)) as total_cached_tokens",
"SUM(request_tokens) as total_request_tokens",
"SUM(response_tokens) as total_response_tokens",
)
end
def tokens_per_hour
tokens_by_period(:hour)
end
def tokens_per_day
tokens_by_period(:day)
end
def tokens_per_month
tokens_by_period(:month)
end
def filter_by_feature(feature_name)
if feature_name == UNKNOWN_FEATURE
@base_query = base_query.where("coalesce(feature_name, '') = ''")
else
@base_query = base_query.where(feature_name: feature_name)
end
self
end
def filter_by_model(model_name)
@base_query = base_query.where(language_model: model_name)
self
end
end
end
end
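A hedged usage sketch of the class above (e.g. from a Rails console); the chained filters are the same ones `AiUsageController#create_report` applies from the request params.

```ruby
report =
  DiscourseAi::Completions::Report
    .new(start_date: 7.days.ago, end_date: Time.current)
    .filter_by_feature("summarize")
    .filter_by_model("gpt-4")

report.total_requests    # COUNT(*) over the filtered audit logs
report.total_tokens      # SUM(request_tokens + response_tokens)
report.guess_period      # :hour, :day, or :month, based on the date span
report.tokens_by_period  # rows grouped by DATE_TRUNC on that period
report.user_breakdown.map(&:username) # top 50 users by usage_count
```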


@ -37,6 +37,8 @@ register_asset "stylesheets/modules/sentiment/common/dashboard.scss"
register_asset "stylesheets/modules/llms/common/ai-llms-editor.scss"
register_asset "stylesheets/modules/llms/common/usage.scss"
register_asset "stylesheets/modules/ai-bot/common/ai-tools.scss"
register_asset "stylesheets/modules/ai-bot/common/ai-artifact.scss"


@ -292,7 +292,7 @@ data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.c
data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[{"index":0,"delta":{},"logprobs":null,"finish_reason":"tool_calls"}],"usage":null}
- data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[],"usage":{"prompt_tokens":735,"completion_tokens":156,"total_tokens":891,"prompt_tokens_details":{"cached_tokens":0,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
+ data: {"id":"chatcmpl-ATimVYagKnCWQ0VXY0Hn2SDjRuN6B","object":"chat.completion.chunk","created":1731647015,"model":"gpt-4o-2024-08-06","system_fingerprint":"fp_45cf54deae","choices":[],"usage":{"prompt_tokens":735,"completion_tokens":156,"total_tokens":891,"prompt_tokens_details":{"cached_tokens":33,"audio_tokens":0},"completion_tokens_details":{"reasoning_tokens":0,"audio_tokens":0,"accepted_prediction_tokens":0,"rejected_prediction_tokens":0}}}
data: [DONE]


@ -657,6 +657,9 @@ TEXT
end
end
end
audit_log = AiApiAuditLog.order("id desc").first
expect(audit_log.cached_tokens).to eq(33)
end
it "properly handles spaces in tools payload and partial tool calls" do


@ -829,8 +829,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
it "updates the title using bot suggestions" do
DiscourseAi::Completions::Llm.with_prepared_responses([expected_response]) do
- playground.title_playground(third_post)
+ playground.title_playground(third_post, user)
expect(pm.reload.title).to eq(expected_response)
end
end


@ -0,0 +1,140 @@
# frozen_string_literal: true
require "rails_helper"
RSpec.describe DiscourseAi::Admin::AiUsageController do
fab!(:admin)
fab!(:user)
let(:usage_path) { "/admin/plugins/discourse-ai/ai-usage.json" }
before { SiteSetting.discourse_ai_enabled = true }
context "when logged in as admin" do
before { sign_in(admin) }
describe "#show" do
fab!(:log1) do
AiApiAuditLog.create!(
provider_id: 1,
feature_name: "summarize",
language_model: "gpt-4",
request_tokens: 100,
response_tokens: 50,
created_at: 1.day.ago,
)
end
fab!(:log2) do
AiApiAuditLog.create!(
provider_id: 1,
feature_name: "translate",
language_model: "gpt-3.5",
request_tokens: 200,
response_tokens: 100,
created_at: 2.days.ago,
)
end
it "returns correct data structure" do
get usage_path
expect(response.status).to eq(200)
json = response.parsed_body
expect(json).to have_key("data")
expect(json).to have_key("features")
expect(json).to have_key("models")
expect(json).to have_key("summary")
end
it "respects date filters" do
get usage_path, params: { start_date: 3.days.ago.to_date, end_date: 1.day.ago.to_date }
json = response.parsed_body
expect(json["summary"]["total_tokens"]).to eq(450) # sum of all tokens
end
it "filters by feature" do
get usage_path, params: { feature: "summarize" }
json = response.parsed_body
features = json["features"]
expect(features.length).to eq(1)
expect(features.first["feature_name"]).to eq("summarize")
expect(features.first["total_tokens"]).to eq(150)
end
it "filters by model" do
get usage_path, params: { model: "gpt-3.5" }
json = response.parsed_body
models = json["models"]
expect(models.length).to eq(1)
expect(models.first["llm"]).to eq("gpt-3.5")
expect(models.first["total_tokens"]).to eq(300)
end
it "handles different period groupings" do
get usage_path, params: { period: "hour" }
expect(response.status).to eq(200)
get usage_path, params: { period: "month" }
expect(response.status).to eq(200)
end
end
# spec/requests/admin/ai_usage_controller_spec.rb
context "with hourly data" do
before do
freeze_time Time.parse("2021-02-01 00:00:00")
# Create data points across different hours
[23.hours.ago, 22.hours.ago, 21.hours.ago, 20.hours.ago].each do |time|
AiApiAuditLog.create!(
provider_id: 1,
feature_name: "summarize",
language_model: "gpt-4",
request_tokens: 100,
response_tokens: 50,
created_at: time,
)
end
end
it "returns hourly data when period is day" do
get usage_path, params: { start_date: 1.day.ago.to_date, end_date: Time.current.to_date }
expect(response.status).to eq(200)
json = response.parsed_body
expect(json["data"].length).to eq(4)
data_by_hour = json["data"].index_by { |d| Time.parse(d["period"]).hour }
expect(data_by_hour.keys.length).to eq(4)
expect(data_by_hour.first[1]["total_tokens"]).to eq(150)
end
end
end
context "when not admin" do
before { sign_in(user) }
it "blocks access" do
get usage_path
expect(response.status).to eq(404)
end
end
context "when plugin disabled" do
before do
SiteSetting.discourse_ai_enabled = false
sign_in(admin)
end
it "returns error" do
get usage_path
expect(response.status).to eq(404)
end
end
end