FEATURE: Enhance AI debugging capabilities and improve the interface (#577)

* FIX: various RAG edge cases

- Nicer text to describe RAG, avoiding the word RAG
- Do not attempt to save the persona when removing uploads if it has not been created yet
- Remove old code that avoided touching RAG params on create

* FIX: Missing pause button for persona users

* FEATURE: Allow specific users to debug AI request/response chains

This can help users easily tune RAG and figure out what is going
on with requests.

* Fix the Discourse helper so it does not explode

* fix test

* simplify implementation
Sam 2024-04-15 23:22:06 +10:00 committed by GitHub
parent 3e54697c5a
commit 4a29f8ed1c
20 changed files with 385 additions and 26 deletions

View File

@ -6,6 +6,21 @@ module DiscourseAi
requires_plugin ::DiscourseAi::PLUGIN_NAME
requires_login
def show_debug_info
post = Post.find(params[:post_id])
guardian.ensure_can_debug_ai_bot_conversation!(post)
# look at this post and every earlier post in the topic, newest first, so we
# can fall back to the most recent AI interaction when the current post has
# no audit log of its own
posts =
Post
.where("post_number <= ?", post.post_number)
.where(topic_id: post.topic_id)
.order("post_number DESC")
debug_info = AiApiAuditLog.where(post: posts).order(created_at: :desc).first
render json: debug_info, status: 200
end
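The request spec at the end of this commit exercises this action; roughly, a permitted user gets back the most recent audit log entry as JSON. A minimal sketch (field names come from the AiApiAuditLog columns used in this commit, the values are illustrative):

get "/discourse-ai/ai-bot/post/#{post.id}/show-debug-info.json"
info = response.parsed_body
info["request_tokens"]        # tokens sent to the LLM provider
info["response_tokens"]       # tokens returned by the provider
info["raw_request_payload"]   # full request body sent to the provider
info["raw_response_payload"]  # full raw response received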
def stop_streaming_response
post = Post.find(params[:post_id])
guardian.ensure_can_see!(post)

View File

@ -1,6 +1,9 @@
# frozen_string_literal: true
class AiApiAuditLog < ActiveRecord::Base
belongs_to :post
belongs_to :topic
module Provider
OpenAI = 1
Anthropic = 2
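With the new post and topic associations, an audit log row can be tied directly to the post it answered. A minimal sketch of creating one (mirroring the spec at the end of this commit; values are illustrative):

AiApiAuditLog.create!(
  post_id: post.id,
  topic_id: post.topic_id,
  provider_id: AiApiAuditLog::Provider::OpenAI,
  raw_request_payload: "request",
  raw_response_payload: "response",
  request_tokens: 1,
  response_tokens: 2,
)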

View File

@ -76,6 +76,32 @@ class AiPersona < ActiveRecord::Base
.map(&:class_instance)
end
def self.persona_users(user: nil)
persona_users =
persona_cache[:persona_users] ||= AiPersona
.where(enabled: true)
.joins(:user)
.pluck(
"ai_personas.id, users.id, users.username_lower, allowed_group_ids, default_llm, mentionable",
)
.map do |id, user_id, username, allowed_group_ids, default_llm, mentionable|
{
id: id,
user_id: user_id,
username: username,
allowed_group_ids: allowed_group_ids,
default_llm: default_llm,
mentionable: mentionable,
}
end
if user
persona_users.select { |mentionable| user.in_any_groups?(mentionable[:allowed_group_ids]) }
else
persona_users
end
end
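The plugin serializer changes later in this commit consume this helper to expose persona bot users to the client. A sketch of the returned shape (keys come from the pluck above; the values are illustrative):

AiPersona.persona_users(user: current_user)
# => [
#      {
#        id: 1,
#        user_id: -1201,
#        username: "researcher_bot",
#        allowed_group_ids: [10],
#        default_llm: "gpt-4",
#        mentionable: true,
#      },
#    ]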
def self.mentionables(user: nil)
all_mentionables =
persona_cache[:mentionable_usernames] ||= AiPersona

View File

@ -22,14 +22,10 @@ const CREATE_ATTRIBUTES = [
"vision_enabled",
"vision_max_pixels",
"rag_uploads",
];
// rag params are populated on save, only show it when editing
const ATTRIBUTES = CREATE_ATTRIBUTES.concat([
"rag_chunk_tokens",
"rag_chunk_overlap_tokens",
"rag_conversation_chunks",
]);
];
const SYSTEM_ATTRIBUTES = [
"id",
@ -129,7 +125,7 @@ export default class AiPersona extends RestModel {
updateProperties() {
let attrs = this.system
? this.getProperties(SYSTEM_ATTRIBUTES)
: this.getProperties(ATTRIBUTES);
: this.getProperties(CREATE_ATTRIBUTES);
attrs.id = this.id;
this.populateCommandOptions(attrs);
@ -143,7 +139,7 @@ export default class AiPersona extends RestModel {
}
workingCopy() {
let attrs = this.getProperties(ATTRIBUTES);
let attrs = this.getProperties(CREATE_ATTRIBUTES);
this.populateCommandOptions(attrs);
return AiPersona.create(attrs);
}

View File

@ -217,7 +217,9 @@ export default class PersonaEditor extends Component {
@action
removeUpload(upload) {
this.editingModel.rag_uploads.removeObject(upload);
this.save();
if (!this.args.model.isNew) {
this.save();
}
}
async toggleField(field, sortPersonas) {

View File

@ -0,0 +1,136 @@
import Component from "@glimmer/component";
import { tracked } from "@glimmer/tracking";
import { action } from "@ember/object";
import { next } from "@ember/runloop";
import { htmlSafe } from "@ember/template";
import DButton from "discourse/components/d-button";
import DModal from "discourse/components/d-modal";
import { ajax } from "discourse/lib/ajax";
import { clipboardCopy, escapeExpression } from "discourse/lib/utilities";
import i18n from "discourse-common/helpers/i18n";
import discourseLater from "discourse-common/lib/later";
import I18n from "discourse-i18n";
export default class DebugAiModal extends Component {
@tracked info = null;
@tracked justCopiedText = "";
constructor() {
super(...arguments);
next(() => {
this.loadApiRequestInfo();
});
}
get htmlContext() {
if (!this.info) {
return "";
}
let parsed;
try {
parsed = JSON.parse(this.info.raw_request_payload);
} catch (e) {
return this.info.raw_request_payload;
}
return htmlSafe(this.jsonToHtml(parsed));
}
// recursively render a parsed JSON payload as nested lists, escaping keys and values
jsonToHtml(json) {
let html = "<ul>";
for (let key in json) {
if (!json.hasOwnProperty(key)) {
continue;
}
html += "<li>";
if (typeof json[key] === "object" && Array.isArray(json[key])) {
html += `<strong>${escapeExpression(key)}:</strong> ${this.jsonToHtml(
json[key]
)}`;
} else if (typeof json[key] === "object") {
html += `<strong>${escapeExpression(
key
)}:</strong> <ul><li>${this.jsonToHtml(json[key])}</li></ul>`;
} else {
let value = json[key];
if (typeof value === "string") {
value = escapeExpression(value);
value = value.replace(/\n/g, "<br>");
}
html += `<strong>${escapeExpression(key)}:</strong> ${value}`;
}
html += "</li>";
}
html += "</ul>";
return html;
}
@action
copyRequest() {
this.copy(this.info.raw_request_payload);
}
@action
copyResponse() {
this.copy(this.info.raw_response_payload);
}
copy(text) {
clipboardCopy(text);
this.justCopiedText = I18n.t("discourse_ai.ai_bot.conversation_shared");
discourseLater(() => {
this.justCopiedText = "";
}, 2000);
}
loadApiRequestInfo() {
ajax(
`/discourse-ai/ai-bot/post/${this.args.model.id}/show-debug-info.json`
).then((result) => {
this.info = result;
});
}
<template>
<DModal
class="ai-debug-modal"
@title={{i18n "discourse_ai.ai_bot.debug_ai_modal.title"}}
@closeModal={{@closeModal}}
>
<:body>
<div class="ai-debug-modal__tokens">
<span>
{{i18n "discourse_ai.ai_bot.debug_ai_modal.request_tokens"}}
{{this.info.request_tokens}}
</span>
<span>
{{i18n "discourse_ai.ai_bot.debug_ai_modal.response_tokens"}}
{{this.info.response_tokens}}
</span>
</div>
<div class="debug-ai-modal__preview">
{{this.htmlContext}}
</div>
</:body>
<:footer>
<DButton
class="btn confirm"
@icon="copy"
@action={{this.copyRequest}}
@label="discourse_ai.ai_bot.debug_ai_modal.copy_request"
/>
<DButton
class="btn confirm"
@icon="copy"
@action={{this.copyResponse}}
@label="discourse_ai.ai_bot.debug_ai_modal.copy_response"
/>
<span class="ai-debut-modal__just-copied">{{this.justCopiedText}}</span>
</:footer>
</DModal>
</template>
}

View File

@ -34,7 +34,7 @@ export default class PersonaRagUploader extends Component.extend(
this.filteredUploads = this.ragUploads || [];
if (this.ragUploads?.length) {
if (this.ragUploads?.length && this.persona?.id) {
ajax(
`/admin/plugins/discourse-ai/ai-personas/${this.persona.id}/files/status.json`
).then((statuses) => {

View File

@ -3,6 +3,7 @@ import { ajax } from "discourse/lib/ajax";
import { popupAjaxError } from "discourse/lib/ajax-error";
import { withPluginApi } from "discourse/lib/plugin-api";
import { registerWidgetShim } from "discourse/widgets/render-glimmer";
import DebugAiModal from "../discourse/components/modal/debug-ai-modal";
import ShareModal from "../discourse/components/modal/share-modal";
import streamText from "../discourse/lib/ai-streamer";
import copyConversation from "../discourse/lib/copy-conversation";
@ -11,6 +12,7 @@ import AiBotHeaderIcon from "../discourse/components/ai-bot-header-icon";
import { showShareConversationModal } from "../discourse/lib/ai-bot-helper";
let enabledChatBotIds = [];
let allowDebug = false;
function isGPTBot(user) {
return user && enabledChatBotIds.includes(user.id);
}
@ -102,6 +104,44 @@ function initializePersonaDecorator(api) {
const MAX_PERSONA_USER_ID = -1200;
function initializeDebugButton(api) {
const currentUser = api.getCurrentUser();
if (!currentUser || !currentUser.ai_enabled_chat_bots || !allowDebug) {
return;
}
let debugAiResponse = async function ({ post }) {
const modal = api.container.lookup("service:modal");
modal.show(DebugAiModal, { model: post });
};
api.addPostMenuButton("debugAi", (post) => {
if (post.topic?.archetype !== "private_message") {
return;
}
if (
!currentUser.ai_enabled_chat_bots.any(
(bot) => post.username === bot.username
)
) {
// special handling for personas (persona bot users start at ID -1200 and go down)
if (post.user_id > MAX_PERSONA_USER_ID) {
return;
}
}
return {
action: debugAiResponse,
icon: "info",
className: "post-action-menu__debug-ai",
title: "discourse_ai.ai_bot.debug_ai",
position: "first",
};
});
}
function initializeShareButton(api) {
const currentUser = api.getCurrentUser();
if (!currentUser || !currentUser.ai_enabled_chat_bots) {
@ -113,6 +153,7 @@ function initializeShareButton(api) {
await copyConversation(post.topic, 1, post.post_number);
showFeedback("discourse_ai.ai_bot.conversation_shared");
} else {
const modal = api.container.lookup("service:modal");
modal.show(ShareModal, { model: post });
}
};
@ -142,8 +183,6 @@ function initializeShareButton(api) {
position: "first",
};
});
const modal = api.container.lookup("service:modal");
}
function initializeShareTopicButton(api) {
@ -177,9 +216,11 @@ export default {
if (user?.ai_enabled_chat_bots) {
enabledChatBotIds = user.ai_enabled_chat_bots.map((bot) => bot.id);
allowDebug = user.can_debug_ai_bot_conversations;
withPluginApi("1.6.0", attachHeaderIcon);
withPluginApi("1.6.0", initializeAIBotReplies);
withPluginApi("1.6.0", initializePersonaDecorator);
withPluginApi("1.22.0", (api) => initializeDebugButton(api, container));
withPluginApi("1.22.0", (api) => initializeShareButton(api, container));
withPluginApi("1.22.0", (api) =>
initializeShareTopicButton(api, container)

View File

@ -141,3 +141,23 @@ details.ai-quote {
span.onebox-ai-llm-title {
font-weight: bold;
}
.d-modal.ai-debug-modal {
--modal-max-width: 99%;
ul {
padding-left: 1em;
}
li {
margin-bottom: 0.2em;
}
li > ul {
margin-top: 0.2em;
margin-bottom: 0.2em;
}
}
.ai-debug-modal__tokens span {
display: block;
}

View File

@ -160,11 +160,11 @@ en:
priority: Priority
priority_help: Priority personas are displayed to users at the top of the persona list. If multiple personas have priority, they will be sorted alphabetically.
command_options: "Command Options"
rag_chunk_tokens: "RAG Chunk Tokens"
rag_chunk_tokens: "Upload Chunk Tokens"
rag_chunk_tokens_help: "The number of tokens to use for each chunk in the RAG model. Increase to increase the amount of context the AI can use. (changing will re-index all uploads)"
rag_chunk_overlap_tokens: "RAG Chunk Overlap Tokens"
rag_chunk_overlap_tokens: "Upload Chunk Overlap Tokens"
rag_chunk_overlap_tokens_help: "The number of tokens to overlap between chunks in the RAG model. (changing will re-index all uploads)"
rag_conversation_chunks: "RAG Conversation Chunks"
rag_conversation_chunks: "Search Conversation Chunks"
rag_conversation_chunks_help: "The number of chunks to use for the RAG model searches. Increase to increase the amount of context the AI can use."
what_are_personas: "What are AI Personas?"
no_persona_selected: |
@ -187,6 +187,8 @@ en:
indexed: "Indexed"
indexing: "Indexing"
uploaded: "Ready to be indexed"
uploading: "Uploading..."
remove: "Remove upload"
related_topics:
title: "Related Topics"
@ -256,6 +258,13 @@ en:
shortcut_title: "Start a PM with an AI bot"
share: "Share AI conversation"
conversation_shared: "Conversation copied"
debug_ai: "View raw AI request and response"
debug_ai_modal:
title: "View AI interaction"
copy_request: "Copy request"
copy_response: "Copy response"
request_tokens: "Request tokens:"
response_tokens: "Response tokens:"
share_full_topic_modal:
title: "Share Conversation Publicly"

View File

@ -96,6 +96,7 @@ en:
ai_bot_enabled: "Enable the AI Bot module."
ai_bot_enable_chat_warning: "Display a warning when PM chat is initiated. Can be overridden by editing the translation string: discourse_ai.ai_bot.pm_warning"
ai_bot_allowed_groups: "When the GPT Bot has access to the PM, it will reply to members of these groups."
ai_bot_debugging_allowed_groups: "Allow these groups to see a debug button on posts which displays the raw AI request and response"
ai_bot_public_sharing_allowed_groups: "Allow these groups to share AI personal messages with the public via a unique publicly available link"
ai_bot_enabled_chat_bots: "Available models to act as an AI Bot"
ai_bot_add_to_header: "Display a button in the header to start a PM with an AI Bot"

View File

@ -16,8 +16,9 @@ DiscourseAi::Engine.routes.draw do
end
scope module: :ai_bot, path: "/ai-bot", defaults: { format: :json } do
post "post/:post_id/stop-streaming" => "bot#stop_streaming_response"
get "bot-username" => "bot#show_bot_username"
get "post/:post_id/show-debug-info" => "bot#show_debug_info"
post "post/:post_id/stop-streaming" => "bot#stop_streaming_response"
end
scope module: :ai_bot, path: "/ai-bot/shared-ai-conversations" do

View File

@ -313,6 +313,11 @@ discourse_ai:
ai_bot_enable_chat_warning:
default: false
client: true
ai_bot_debugging_allowed_groups:
type: group_list
list_type: compact
default: ""
allow_any: false
ai_bot_allowed_groups:
type: group_list
list_type: compact
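The new group_list setting is read through its generated _map variant by the guardian and serializer changes below; for example (a sketch matching the spec in this commit, group is assumed to exist):

SiteSetting.ai_bot_debugging_allowed_groups = group.id.to_s
SiteSetting.ai_bot_debugging_allowed_groups_map # => [group.id]
user.in_any_groups?(SiteSetting.ai_bot_debugging_allowed_groups_map) # => true for members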

View File

@ -33,7 +33,6 @@ module DiscourseAi
system_insts,
messages: conversation_context,
topic_id: post.topic_id,
post_id: post.id,
)
title_prompt.push(

View File

@ -139,6 +139,16 @@ module DiscourseAi
end
end
plugin.add_to_serializer(
:current_user,
:can_debug_ai_bot_conversations,
include_condition: -> do
SiteSetting.ai_bot_enabled && scope.authenticated? &&
SiteSetting.ai_bot_debugging_allowed_groups.present? &&
scope.user.in_any_groups?(SiteSetting.ai_bot_debugging_allowed_groups_map)
end,
) { true }
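On the client this surfaces as user.can_debug_ai_bot_conversations, which the debug button initializer reads. A quick way to check the serialized value (a sketch following the same pattern as the spec below):

serializer = CurrentUserSerializer.new(user, scope: Guardian.new(user))
serializer.as_json[:current_user][:can_debug_ai_bot_conversations]
# => true only when ai_bot_enabled is on and the user is in ai_bot_debugging_allowed_groups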
plugin.add_to_serializer(
:current_user,
:ai_enabled_chat_bots,
@ -163,11 +173,15 @@ module DiscourseAi
SQL
bots.each { |hash| hash["model_name"] = model_map[hash["id"]] }
mentionables = AiPersona.mentionables(user: scope.user)
if mentionables.present?
persona_users = AiPersona.persona_users(user: scope.user)
if persona_users.present?
bots.concat(
mentionables.map do |mentionable|
{ "id" => mentionable[:user_id], "username" => mentionable[:username] }
persona_users.map do |persona_user|
{
"id" => persona_user[:user_id],
"username" => persona_user[:username],
"mentionable" => persona_user[:mentionable],
}
end,
)
end

View File

@ -118,10 +118,15 @@ module DiscourseAi
category = categories[topic["category_id"]]
category_names = +""
if category["parent_category_id"]
category_names << categories[category["parent_category_id"]]["name"] << " > "
# TODO @nbianca: this is broken now because we are not getting child categories
# to avoid erroring out we simply skip
# sideloading from search would probably be easier
if category
if category["parent_category_id"]
category_names << categories[category["parent_category_id"]]["name"] << " > "
end
category_names << category["name"]
end
category_names << category["name"]
row = {
title: topic["title"],
url: "https://meta.discourse.org/t/-/#{post["topic_id"]}/#{post["post_number"]}",

View File

@ -2,6 +2,19 @@
module DiscourseAi
module GuardianExtensions
def can_debug_ai_bot_conversation?(target)
return false if anonymous?
return false if !can_see?(target)
if !SiteSetting.discourse_ai_enabled || !SiteSetting.ai_bot_enabled ||
!SiteSetting.ai_bot_debugging_allowed_groups_map.any?
return false
end
user.in_any_groups?(SiteSetting.ai_bot_debugging_allowed_groups_map)
end
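The controller's ensure_can_debug_ai_bot_conversation! call raises unless this method returns true. A minimal sketch of checking it directly (assumes the extension is mixed into Guardian, as the plugin does elsewhere):

guardian = Guardian.new(user)
guardian.can_debug_ai_bot_conversation?(post)
# => false for anonymous users, users who cannot see the post, or users outside
#    ai_bot_debugging_allowed_groups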
def can_share_ai_bot_conversation?(target)
return false if anonymous?

View File

@ -23,6 +23,14 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
bot_allowed_group.add(admin)
end
it "adds a can_debug_ai_bot_conversations method to current user" do
SiteSetting.ai_bot_debugging_allowed_groups = bot_allowed_group.id.to_s
serializer = CurrentUserSerializer.new(admin, scope: Guardian.new(admin))
serializer = serializer.as_json
expect(serializer[:current_user][:can_debug_ai_bot_conversations]).to eq(true)
end
it "adds mentionables to current_user_serializer" do
Group.refresh_automatic_groups!
@ -42,6 +50,22 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
persona_bot = bots.find { |bot| bot["id"] == persona.user_id }
expect(persona_bot["username"]).to eq(persona.user.username)
expect(persona_bot["mentionable"]).to eq(true)
end
it "includes user ids for all personas in the serializer" do
Group.refresh_automatic_groups!
persona = Fabricate(:ai_persona, enabled: true, allowed_group_ids: [bot_allowed_group.id])
persona.create_user!
serializer = CurrentUserSerializer.new(admin, scope: Guardian.new(admin))
serializer = serializer.as_json
bots = serializer[:current_user][:ai_enabled_chat_bots]
persona_bot = bots.find { |bot| bot["id"] == persona.user_id }
expect(persona_bot["username"]).to eq(persona.user.username)
expect(persona_bot["mentionable"]).to eq(false)
end
it "queues a job to generate a reply by the AI" do

View File

@ -2,12 +2,58 @@
RSpec.describe DiscourseAi::AiBot::BotController do
fab!(:user)
fab!(:pm_topic) { Fabricate(:private_message_topic) }
fab!(:pm_post) { Fabricate(:post, topic: pm_topic) }
before { sign_in(user) }
describe "#stop_streaming_response" do
fab!(:pm_topic) { Fabricate(:private_message_topic) }
fab!(:pm_post) { Fabricate(:post, topic: pm_topic) }
describe "#show_debug_info" do
before do
SiteSetting.ai_bot_enabled = true
SiteSetting.discourse_ai_enabled = true
end
it "returns a 403 when the user cannot debug the AI bot conversation" do
get "/discourse-ai/ai-bot/post/#{pm_post.id}/show-debug-info"
expect(response.status).to eq(403)
end
it "returns debug info if the user can debug the AI bot conversation" do
user = pm_topic.topic_allowed_users.first.user
sign_in(user)
AiApiAuditLog.create!(
post_id: pm_post.id,
provider_id: 1,
topic_id: pm_topic.id,
raw_request_payload: "request",
raw_response_payload: "response",
request_tokens: 1,
response_tokens: 2,
)
Group.refresh_automatic_groups!
SiteSetting.ai_bot_debugging_allowed_groups = user.groups.first.id.to_s
get "/discourse-ai/ai-bot/post/#{pm_post.id}/show-debug-info"
expect(response.status).to eq(200)
expect(response.parsed_body["request_tokens"]).to eq(1)
expect(response.parsed_body["response_tokens"]).to eq(2)
expect(response.parsed_body["raw_request_payload"]).to eq("request")
expect(response.parsed_body["raw_response_payload"]).to eq("response")
post2 = Fabricate(:post, topic: pm_topic)
# return previous post if current has no debug info
get "/discourse-ai/ai-bot/post/#{post2.id}/show-debug-info"
expect(response.status).to eq(200)
expect(response.parsed_body["request_tokens"]).to eq(1)
expect(response.parsed_body["response_tokens"]).to eq(2)
end
end
describe "#stop_streaming_response" do
let(:redis_stream_key) { "gpt_cancel:#{pm_post.id}" }
before { Discourse.redis.setex(redis_stream_key, 60, 1) }

View File

@ -87,6 +87,9 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
vision_enabled: true,
vision_max_pixels: 100,
rag_uploads: [],
rag_chunk_tokens: 374,
rag_chunk_overlap_tokens: 10,
rag_conversation_chunks: 10,
};
const aiPersona = AiPersona.create({ ...properties });