FEATURE: smarter persona tethering (#832)

Splits persona permissions so you can allow a persona on:

- chat dms
- personal messages
- topic mentions
- chat channels

(any combination is allowed)

Previously we did not have this flexibility.

Additionally, adds the ability to "tether" a language model to a persona so it will always be used by the persona. This allows people to use a cheaper language model for one group of people and a more expensive one for another.
This commit is contained in:
Sam 2024-10-16 07:20:31 +11:00 committed by GitHub
parent c7acb4a6a0
commit bdf3b6268b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
20 changed files with 422 additions and 122 deletions

View File

@ -96,7 +96,6 @@ module DiscourseAi
:temperature,
:default_llm,
:user_id,
:mentionable,
:max_context_posts,
:vision_enabled,
:vision_max_pixels,
@ -104,9 +103,13 @@ module DiscourseAi
:rag_chunk_overlap_tokens,
:rag_conversation_chunks,
:question_consolidator_llm,
:allow_chat,
:allow_chat_channel_mentions,
:allow_chat_direct_messages,
:allow_topic_mentions,
:allow_personal_messages,
:tool_details,
:forced_tool_count,
:force_default_llm,
allowed_group_ids: [],
rag_uploads: [:id],
)

View File

@ -1,8 +1,8 @@
# frozen_string_literal: true
class AiPersona < ActiveRecord::Base
# TODO remove this line 01-11-2024
self.ignored_columns = [:commands]
# TODO remove this line 01-01-2025
self.ignored_columns = %i[commands allow_chat mentionable]
# places a hard limit, so per site we cache a maximum of 500 classes
MAX_PERSONAS_PER_SITE = 500
@ -52,30 +52,47 @@ class AiPersona < ActiveRecord::Base
persona_cache[:persona_users] ||= AiPersona
.where(enabled: true)
.joins(:user)
.pluck(
"ai_personas.id, users.id, users.username_lower, allowed_group_ids, default_llm, mentionable, allow_chat",
)
.map do |id, user_id, username, allowed_group_ids, default_llm, mentionable, allow_chat|
.map do |persona|
{
id: id,
user_id: user_id,
username: username,
allowed_group_ids: allowed_group_ids,
default_llm: default_llm,
mentionable: mentionable,
allow_chat: allow_chat,
id: persona.id,
user_id: persona.user_id,
username: persona.user.username_lower,
allowed_group_ids: persona.allowed_group_ids,
default_llm: persona.default_llm,
force_default_llm: persona.force_default_llm,
allow_chat_channel_mentions: persona.allow_chat_channel_mentions,
allow_chat_direct_messages: persona.allow_chat_direct_messages,
allow_topic_mentions: persona.allow_topic_mentions,
allow_personal_messages: persona.allow_personal_messages,
}
end
if user
persona_users.select { |mentionable| user.in_any_groups?(mentionable[:allowed_group_ids]) }
persona_users.select { |persona_user| user.in_any_groups?(persona_user[:allowed_group_ids]) }
else
persona_users
end
end
def self.allowed_chat(user: nil)
personas = persona_cache[:allowed_chat] ||= persona_users.select { |u| u[:allow_chat] }
def self.allowed_modalities(
user: nil,
allow_chat_channel_mentions: false,
allow_chat_direct_messages: false,
allow_topic_mentions: false,
allow_personal_messages: false
)
index =
"modality-#{allow_chat_channel_mentions}-#{allow_chat_direct_messages}-#{allow_topic_mentions}-#{allow_personal_messages}"
personas =
persona_cache[index.to_sym] ||= persona_users.select do |persona|
next true if allow_chat_channel_mentions && persona[:allow_chat_channel_mentions]
next true if allow_chat_direct_messages && persona[:allow_chat_direct_messages]
next true if allow_topic_mentions && persona[:allow_topic_mentions]
next true if allow_personal_messages && persona[:allow_personal_messages]
false
end
if user
personas.select { |u| user.in_any_groups?(u[:allowed_group_ids]) }
else
@ -83,18 +100,6 @@ class AiPersona < ActiveRecord::Base
end
end
def self.mentionables(user: nil)
all_mentionables =
persona_cache[:mentionables] ||= persona_users.select do |mentionable|
mentionable[:mentionable]
end
if user
all_mentionables.select { |mentionable| user.in_any_groups?(mentionable[:allowed_group_ids]) }
else
all_mentionables
end
end
after_commit :bump_cache
def bump_cache
@ -113,7 +118,11 @@ class AiPersona < ActiveRecord::Base
vision_max_pixels
rag_conversation_chunks
question_consolidator_llm
allow_chat
allow_chat_channel_mentions
allow_chat_direct_messages
allow_topic_mentions
allow_personal_messages
force_default_llm
name
description
allowed_group_ids
@ -128,6 +137,8 @@ class AiPersona < ActiveRecord::Base
instance_attributes[attr] = value
end
instance_attributes[:username] = user&.username_lower
if persona_class
instance_attributes.each do |key, value|
# description/name are localized
@ -243,7 +254,10 @@ class AiPersona < ActiveRecord::Base
private
def chat_preconditions
if allow_chat && !default_llm
if (
allow_chat_channel_mentions || allow_chat_direct_messages || allow_topic_mentions ||
force_default_llm
) && !default_llm
errors.add(:default_llm, I18n.t("discourse_ai.ai_bot.personas.default_llm_required"))
end
end
@ -281,7 +295,6 @@ end
# temperature :float
# top_p :float
# user_id :integer
# mentionable :boolean default(FALSE), not null
# default_llm :text
# max_context_posts :integer
# max_post_context_tokens :integer
@ -291,16 +304,15 @@ end
# rag_chunk_tokens :integer default(374), not null
# rag_chunk_overlap_tokens :integer default(10), not null
# rag_conversation_chunks :integer default(10), not null
# role :enum default("bot"), not null
# role_category_ids :integer default([]), not null, is an Array
# role_tags :string default([]), not null, is an Array
# role_group_ids :integer default([]), not null, is an Array
# role_whispers :boolean default(FALSE), not null
# role_max_responses_per_hour :integer default(50), not null
# question_consolidator_llm :text
# allow_chat :boolean default(FALSE), not null
# tool_details :boolean default(TRUE), not null
# tools :json not null
# forced_tool_count :integer default(-1), not null
# allow_chat_channel_mentions :boolean default(FALSE), not null
# allow_chat_direct_messages :boolean default(FALSE), not null
# allow_topic_mentions :boolean default(FALSE), not null
# allow_personal_messages :boolean default(TRUE), not null
# force_default_llm :boolean default(FALSE), not null
#
# Indexes
#

View File

@ -14,7 +14,6 @@ class LocalizedAiPersonaSerializer < ApplicationSerializer
:allowed_group_ids,
:temperature,
:top_p,
:mentionable,
:default_llm,
:user_id,
:max_context_posts,
@ -24,9 +23,13 @@ class LocalizedAiPersonaSerializer < ApplicationSerializer
:rag_chunk_overlap_tokens,
:rag_conversation_chunks,
:question_consolidator_llm,
:allow_chat,
:tool_details,
:forced_tool_count
:forced_tool_count,
:allow_chat_channel_mentions,
:allow_chat_direct_messages,
:allow_topic_mentions,
:allow_personal_messages,
:force_default_llm
has_one :user, serializer: BasicUserSerializer, embed: :object
has_many :rag_uploads, serializer: UploadSerializer, embed: :object

View File

@ -15,8 +15,8 @@ const CREATE_ATTRIBUTES = [
"top_p",
"temperature",
"user_id",
"mentionable",
"default_llm",
"force_default_llm",
"user",
"max_context_posts",
"vision_enabled",
@ -29,6 +29,10 @@ const CREATE_ATTRIBUTES = [
"allow_chat",
"tool_details",
"forced_tool_count",
"allow_personal_messages",
"allow_topic_mentions",
"allow_chat_channel_mentions",
"allow_chat_direct_messages",
];
const SYSTEM_ATTRIBUTES = [
@ -38,8 +42,8 @@ const SYSTEM_ATTRIBUTES = [
"system",
"priority",
"user_id",
"mentionable",
"default_llm",
"force_default_llm",
"user",
"max_context_posts",
"vision_enabled",
@ -49,8 +53,11 @@ const SYSTEM_ATTRIBUTES = [
"rag_chunk_overlap_tokens",
"rag_conversation_chunks",
"question_consolidator_llm",
"allow_chat",
"tool_details",
"allow_personal_messages",
"allow_topic_mentions",
"allow_chat_channel_mentions",
"allow_chat_direct_messages",
];
class ToolOption {

View File

@ -44,6 +44,7 @@ export default class PersonaEditor extends Component {
@tracked selectedTools = [];
@tracked selectedToolNames = [];
@tracked forcedToolNames = [];
@tracked hasDefaultLlm = false;
get chatPluginEnabled() {
return this.siteSettings.chat_enabled;
@ -81,6 +82,7 @@ export default class PersonaEditor extends Component {
@action
updateModel() {
this.editingModel = this.args.model.workingCopy();
this.hasDefaultLlm = !!this.editingModel.default_llm;
this.showDelete = !this.args.model.isNew && !this.args.model.system;
this.maxPixelsValue = this.findClosestPixelValue(
this.editingModel.vision_max_pixels
@ -183,8 +185,10 @@ export default class PersonaEditor extends Component {
set mappedDefaultLlm(value) {
if (value === "blank") {
this.editingModel.default_llm = null;
this.hasDefaultLlm = false;
} else {
this.editingModel.default_llm = value;
this.hasDefaultLlm = true;
}
}
@ -344,6 +348,16 @@ export default class PersonaEditor extends Component {
@content={{I18n.t "discourse_ai.ai_persona.default_llm_help"}}
/>
</div>
{{#if this.hasDefaultLlm}}
<div class="control-group">
<label>
<Input
@type="checkbox"
@checked={{this.editingModel.force_default_llm}}
/>
{{I18n.t "discourse_ai.ai_persona.force_default_llm"}}</label>
</div>
{{/if}}
{{#unless @model.isNew}}
<div class="control-group">
<label>{{I18n.t "discourse_ai.ai_persona.user"}}</label>
@ -429,33 +443,73 @@ export default class PersonaEditor extends Component {
disabled={{this.editingModel.system}}
/>
</div>
<div class="control-group ai-persona-editor__allow_personal_messages">
<label>
<Input
@type="checkbox"
@checked={{this.editingModel.allow_personal_messages}}
/>
{{I18n.t "discourse_ai.ai_persona.allow_personal_messages"}}</label>
<DTooltip
@icon="question-circle"
@content={{I18n.t
"discourse_ai.ai_persona.allow_personal_messages_help"
}}
/>
</div>
{{#if this.editingModel.user}}
{{#if this.chatPluginEnabled}}
<div class="control-group ai-persona-editor__allow_chat">
<label>
<Input
@type="checkbox"
@checked={{this.editingModel.allow_chat}}
/>
{{I18n.t "discourse_ai.ai_persona.allow_chat"}}</label>
<DTooltip
@icon="question-circle"
@content={{I18n.t "discourse_ai.ai_persona.allow_chat_help"}}
/>
</div>
{{/if}}
<div class="control-group ai-persona-editor__mentionable">
<div class="control-group ai-persona-editor__allow_topic_mentions">
<label>
<Input
@type="checkbox"
@checked={{this.editingModel.mentionable}}
@checked={{this.editingModel.allow_topic_mentions}}
/>
{{I18n.t "discourse_ai.ai_persona.mentionable"}}</label>
{{I18n.t "discourse_ai.ai_persona.allow_topic_mentions"}}</label>
<DTooltip
@icon="question-circle"
@content={{I18n.t "discourse_ai.ai_persona.mentionable_help"}}
@content={{I18n.t
"discourse_ai.ai_persona.allow_topic_mentions_help"
}}
/>
</div>
{{#if this.chatPluginEnabled}}
<div
class="control-group ai-persona-editor__allow_chat_direct_messages"
>
<label>
<Input
@type="checkbox"
@checked={{this.editingModel.allow_chat_direct_messages}}
/>
{{I18n.t
"discourse_ai.ai_persona.allow_chat_direct_messages"
}}</label>
<DTooltip
@icon="question-circle"
@content={{I18n.t
"discourse_ai.ai_persona.allow_chat_direct_messages_help"
}}
/>
</div>
<div
class="control-group ai-persona-editor__allow_chat_channel_mentions"
>
<label>
<Input
@type="checkbox"
@checked={{this.editingModel.allow_chat_channel_mentions}}
/>
{{I18n.t
"discourse_ai.ai_persona.allow_chat_channel_mentions"
}}</label>
<DTooltip
@icon="question-circle"
@content={{I18n.t
"discourse_ai.ai_persona.allow_chat_channel_mentions_help"
}}
/>
</div>
{{/if}}
{{/if}}
<div class="control-group ai-persona-editor__tool-details">
<label>

View File

@ -32,6 +32,7 @@ export default class BotSelector extends Component {
@service currentUser;
@service siteSettings;
@tracked llm;
@tracked allowLLMSelector = true;
STORE_NAMESPACE = "discourse_ai_persona_selector_";
LLM_STORE_NAMESPACE = "discourse_ai_llm_selector_";
@ -54,6 +55,7 @@ export default class BotSelector extends Component {
}
this.composer.metaData = { ai_persona_id: this._value };
this.setAllowLLMSelector();
let llm = this.preferredLlmStore.getObject("id");
llm = llm || this.llmOptions[0].id;
@ -93,6 +95,15 @@ export default class BotSelector extends Component {
this._value = newValue;
this.preferredPersonaStore.setObject({ key: "id", value: newValue });
this.composer.metaData = { ai_persona_id: newValue };
this.setAllowLLMSelector();
}
setAllowLLMSelector() {
const persona = this.currentUser.ai_enabled_personas.find(
(innerPersona) => innerPersona.id === this._value
);
this.allowLLMSelector = !persona?.force_default_llm;
}
get currentLlm() {
@ -105,7 +116,14 @@ export default class BotSelector extends Component {
(bot) => bot.model_name === this.llm
).username;
this.preferredLlmStore.setObject({ key: "id", value: newValue });
this.composer.set("targetRecipients", botUsername);
if (this.allowLLMSelector) {
this.composer.set("targetRecipients", botUsername);
} else {
const persona = this.currentUser.ai_enabled_personas.find(
(innerPersona) => innerPersona.id === this._value
);
this.composer.set("targetRecipients", persona.username || "");
}
}
get llmOptions() {
@ -131,14 +149,16 @@ export default class BotSelector extends Component {
@options={{hash icon="robot" filterable=this.filterable}}
/>
</div>
<div class="llm-selector">
<DropdownSelectBox
class="persona-llm-selector__llm-dropdown"
@value={{this.currentLlm}}
@content={{this.llmOptions}}
@options={{hash icon="globe"}}
/>
</div>
{{#if this.allowLLMSelector}}
<div class="llm-selector">
<DropdownSelectBox
class="persona-llm-selector__llm-dropdown"
@value={{this.currentLlm}}
@content={{this.llmOptions}}
@options={{hash icon="globe"}}
/>
</div>
{{/if}}
</div>
</template>
}

View File

@ -68,9 +68,12 @@
&__tool-details,
&__vision_enabled,
&__allow_chat,
&__priority,
&__mentionable {
&__allow_chat_direct_messages,
&__allow_chat_channel_mentions,
&__allow_topic_mentions,
&__allow_personal_messages,
&__force_default_llm,
&__priority {
display: flex;
align-items: center;
}

View File

@ -148,8 +148,15 @@ en:
question_consolidator_llm_help: The language model to use for the question consolidator, you may choose a less powerful model to save costs.
system_prompt: System Prompt
forced_tool_strategy: Forced Tool Strategy
allow_chat: "Allow Chat"
allow_chat_help: "If enabled, users in allowed groups can DM this persona"
allow_chat_direct_messages: "Allow Chat Direct Messages"
allow_chat_direct_messages_help: "If enabled, users in allowed groups can send direct messages to this persona."
allow_chat_channel_mentions: "Allow Chat Channel Mentions"
allow_chat_channel_mentions_help: "If enabled, users in allowed groups can mention this persona in chat channels."
allow_personal_messages: "Allow Personal Messages"
allow_personal_messages_help: "If enabled, users in allowed groups can send personal messages to this persona."
allow_topic_mentions: "Allow Topic Mentions"
allow_topic_mentions_help: "If enabled, users in allowed groups can mention this persona in topics."
force_default_llm: "Always use default Language Model"
save: Save
saved: AI Persona Saved
enabled: "Enabled?"

View File

@ -0,0 +1,23 @@
# frozen_string_literal: true
class AiPersonaChatTopicRefactor < ActiveRecord::Migration[7.1]
def change
add_column :ai_personas, :allow_chat_channel_mentions, :boolean, default: false, null: false
add_column :ai_personas, :allow_chat_direct_messages, :boolean, default: false, null: false
add_column :ai_personas, :allow_topic_mentions, :boolean, default: false, null: false
add_column :ai_personas, :allow_personal_messages, :boolean, default: true, null: false
add_column :ai_personas, :force_default_llm, :boolean, default: false, null: false
execute <<~SQL
UPDATE ai_personas
SET allow_chat_channel_mentions = mentionable, allow_chat_direct_messages = true
WHERE allow_chat = true
SQL
execute <<~SQL
UPDATE ai_personas
SET allow_topic_mentions = true
WHERE mentionable = true
SQL
end
end

View File

@ -0,0 +1,7 @@
# frozen_string_literal: true
class AiPersonaPostMigrateDropCols < ActiveRecord::Migration[7.1]
def change
remove_columns :ai_personas, :allow_chat
remove_columns :ai_personas, :mentionable
end
end

View File

@ -8,11 +8,10 @@ module DiscourseAi
Bot = Struct.new(:id, :name, :llm)
def self.all_bot_ids
mentionable_persona_user_ids =
AiPersona.mentionables.map { |mentionable| mentionable[:user_id] }
mentionable_bot_users = LlmModel.joins(:user).pluck("users.id")
mentionable_bot_users + mentionable_persona_user_ids
AiPersona
.persona_users
.map { |persona| persona[:user_id] }
.concat(LlmModel.where(enabled_chat_bot: true).pluck(:user_id))
end
def self.find_participant_in(participant_ids)
@ -109,7 +108,13 @@ module DiscourseAi
DiscourseAi::AiBot::Personas::Persona
.all(user: scope.user)
.map do |persona|
{ id: persona.id, name: persona.name, description: persona.description }
{
id: persona.id,
name: persona.name,
description: persona.description,
force_default_llm: persona.force_default_llm,
username: persona.username,
}
end
end
@ -140,7 +145,7 @@ module DiscourseAi
{
"id" => persona_user[:user_id],
"username" => persona_user[:username],
"mentionable" => persona_user[:mentionable],
"force_default_llm" => persona_user[:force_default_llm],
"is_persona" => true,
}
end,

View File

@ -21,7 +21,15 @@ module DiscourseAi
nil
end
def allow_chat
def force_default_llm
false
end
def allow_chat_channel_mentions
false
end
def allow_chat_direct_messages
false
end

View File

@ -11,17 +11,19 @@ module DiscourseAi
def self.find_chat_persona(message, channel, user)
if channel.direct_message_channel?
AiPersona.allowed_chat.find do |p|
p[:user_id].in?(channel.allowed_user_ids) && (user.group_ids & p[:allowed_group_ids])
end
AiPersona
.allowed_modalities(allow_chat_direct_messages: true)
.find do |p|
p[:user_id].in?(channel.allowed_user_ids) && (user.group_ids & p[:allowed_group_ids])
end
else
# let's defer on the parse if there is no @ in the message
if message.message.include?("@")
mentions = message.parsed_mentions.parsed_direct_mentions
if mentions.present?
AiPersona.allowed_chat.find do |p|
p[:username].in?(mentions) && (user.group_ids & p[:allowed_group_ids])
end
AiPersona
.allowed_modalities(allow_chat_channel_mentions: true)
.find { |p| p[:username].in?(mentions) && (user.group_ids & p[:allowed_group_ids]) }
end
end
end
@ -29,8 +31,14 @@ module DiscourseAi
def self.schedule_chat_reply(message, channel, user, context)
return if !SiteSetting.ai_bot_enabled
return if AiPersona.allowed_chat.blank?
return if AiPersona.allowed_chat.any? { |m| m[:user_id] == user.id }
all_chat =
AiPersona.allowed_modalities(
allow_chat_channel_mentions: true,
allow_chat_direct_messages: true,
)
return if all_chat.blank?
return if all_chat.any? { |m| m[:user_id] == user.id }
persona = find_chat_persona(message, channel, user)
return if !persona
@ -56,15 +64,23 @@ module DiscourseAi
def self.schedule_reply(post)
return if is_bot_user_id?(post.user_id)
mentionables = nil
bot_ids = LlmModel.joins(:user).pluck("users.id")
mentionables = AiPersona.mentionables(user: post.user)
if post.topic.private_message?
mentionables =
AiPersona.allowed_modalities(user: post.user, allow_personal_messages: true)
else
mentionables = AiPersona.allowed_modalities(user: post.user, allow_topic_mentions: true)
end
bot_user = nil
mentioned = nil
all_llm_user_ids = LlmModel.joins(:user).pluck("users.id")
if post.topic.private_message?
bot_user = post.topic.topic_allowed_users.where(user_id: bot_ids).first&.user
# this is an edge case, you started a PM with a different bot
bot_user = post.topic.topic_allowed_users.where(user_id: all_llm_user_ids).first&.user
bot_user ||=
post
.topic
@ -114,6 +130,8 @@ module DiscourseAi
persona ||= DiscourseAi::AiBot::Personas::General
bot_user = User.find(persona.user_id) if persona && persona.force_default_llm
bot = DiscourseAi::AiBot::Bot.as(bot_user, persona: persona.new)
new(bot).update_playground_with(post)
end

View File

@ -35,15 +35,16 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
expect(serializer[:current_user][:can_debug_ai_bot_conversations]).to eq(true)
end
it "adds mentionables to current_user_serializer" do
it "adds information about forcing default llm to current_user_serializer" do
Group.refresh_automatic_groups!
persona =
Fabricate(
:ai_persona,
mentionable: true,
enabled: true,
allowed_group_ids: [bot_allowed_group.id],
default_llm: "claude-2",
force_default_llm: true,
)
persona.create_user!
@ -54,7 +55,7 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
persona_bot = bots.find { |bot| bot["id"] == persona.user_id }
expect(persona_bot["username"]).to eq(persona.user.username)
expect(persona_bot["mentionable"]).to eq(true)
expect(persona_bot["force_default_llm"]).to eq(true)
end
it "includes user ids for all personas in the serializer" do
@ -69,7 +70,7 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
persona_bot = bots.find { |bot| bot["id"] == persona.user_id }
expect(persona_bot["username"]).to eq(persona.user.username)
expect(persona_bot["mentionable"]).to eq(false)
expect(persona_bot["force_default_llm"]).to eq(false)
end
it "queues a job to generate a reply by the AI" do

View File

@ -55,6 +55,11 @@ RSpec.describe DiscourseAi::AiBot::Playground do
)
end
after do
# we must reset cache on persona cause data can be rolled back
AiPersona.persona_cache.flush!
end
describe "is_bot_user_id?" do
it "properly detects ALL bots as bot users" do
persona = Fabricate(:ai_persona, enabled: false)
@ -227,7 +232,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
vision_enabled: true,
vision_max_pixels: 1_000,
default_llm: "custom:#{opus_model.id}",
mentionable: true,
allow_topic_mentions: true,
)
end
@ -277,7 +282,11 @@ RSpec.describe DiscourseAi::AiBot::Playground do
)
persona.create_user!
persona.update!(default_llm: "custom:#{claude_2.id}", mentionable: true)
persona.update!(
default_llm: "custom:#{claude_2.id}",
allow_chat_channel_mentions: true,
allow_topic_mentions: true,
)
persona
end
@ -294,7 +303,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
SiteSetting.ai_bot_enabled = true
SiteSetting.chat_allowed_groups = "#{Group::AUTO_GROUPS[:trust_level_0]}"
Group.refresh_automatic_groups!
persona.update!(allow_chat: true, mentionable: true, default_llm: "custom:#{opus_model.id}")
persona.update!(allow_chat_channel_mentions: true, default_llm: "custom:#{opus_model.id}")
end
it "should behave in a sane way when threading is enabled" do
@ -406,8 +415,9 @@ RSpec.describe DiscourseAi::AiBot::Playground do
SiteSetting.chat_allowed_groups = "#{Group::AUTO_GROUPS[:trust_level_0]}"
Group.refresh_automatic_groups!
persona.update!(
allow_chat: true,
mentionable: false,
allow_chat_direct_messages: true,
allow_topic_mentions: false,
allow_chat_channel_mentions: false,
default_llm: "custom:#{opus_model.id}",
)
SiteSetting.ai_bot_enabled = true
@ -481,7 +491,6 @@ RSpec.describe DiscourseAi::AiBot::Playground do
# it also needs to include history per config - first feed some history
persona.update!(enabled: false)
persona_guardian = Guardian.new(persona.user)
4.times do |i|
@ -561,6 +570,8 @@ RSpec.describe DiscourseAi::AiBot::Playground do
# we still should be able to mention with no bots
toggle_enabled_bots(bots: [])
persona.update!(allow_topic_mentions: true)
post = nil
DiscourseAi::Completions::Llm.with_prepared_responses(["Yes I can"]) do
post =
@ -574,6 +585,16 @@ RSpec.describe DiscourseAi::AiBot::Playground do
last_post = post.topic.posts.order(:post_number).last
expect(last_post.raw).to eq("Yes I can")
expect(last_post.user_id).to eq(persona.user_id)
persona.update!(allow_topic_mentions: false)
post =
create_post(
title: "My public topic ABC",
raw: "Hey @#{persona.user.username}, can you help me?",
)
expect(post.topic.posts.last.post_number).to eq(1)
end
it "allows PMing a persona even when no particular bots are enabled" do
@ -603,6 +624,50 @@ RSpec.describe DiscourseAi::AiBot::Playground do
expect(last_post.topic.allowed_users.pluck(:user_id)).to include(persona.user_id)
expect(last_post.topic.participant_count).to eq(2)
# ensure it can be disabled
persona.update!(allow_personal_messages: false)
post =
create_post(
raw: "Hey there #{persona.user.username}, can you help me please",
topic_id: post.topic.id,
user: admin,
)
expect(post.post_number).to eq(3)
end
it "can tether a persona unconditionally to an llm" do
gpt_35_turbo = Fabricate(:llm_model, name: "gpt-3.5-turbo")
# If you start a PM with GPT 3.5 bot, replies should come from it, not from Claude
SiteSetting.ai_bot_enabled = true
toggle_enabled_bots(bots: [gpt_35_turbo, claude_2])
post = nil
persona.update!(force_default_llm: true, default_llm: "custom:#{gpt_35_turbo.id}")
DiscourseAi::Completions::Llm.with_prepared_responses(
["Yes I can", "Magic Title"],
llm: "custom:#{gpt_35_turbo.id}",
) do
post =
create_post(
title: "I just made a PM",
raw: "hello world",
target_usernames: "#{user.username},#{claude_2.user.username}",
archetype: Archetype.private_message,
user: admin,
custom_fields: {
"ai_persona_id" => persona.id,
},
)
end
last_post = post.topic.posts.order(:post_number).last
expect(last_post.raw).to eq("Yes I can")
expect(last_post.user_id).to eq(persona.user_id)
end
it "picks the correct llm for persona in PMs" do

View File

@ -71,9 +71,12 @@ RSpec.describe AiPersona do
forum_helper = AiPersona.find_by(name: "Forum Helper")
forum_helper.update!(
user_id: 1,
mentionable: true,
default_llm: "anthropic:claude-2",
max_context_posts: 3,
allow_topic_mentions: true,
allow_personal_messages: true,
allow_chat_channel_mentions: true,
allow_chat_direct_messages: true,
)
klass = forum_helper.class_instance
@ -83,9 +86,12 @@ RSpec.describe AiPersona do
# tl 0 by default
expect(klass.allowed_group_ids).to eq([10])
expect(klass.user_id).to eq(1)
expect(klass.mentionable).to eq(true)
expect(klass.default_llm).to eq("anthropic:claude-2")
expect(klass.max_context_posts).to eq(3)
expect(klass.allow_topic_mentions).to eq(true)
expect(klass.allow_personal_messages).to eq(true)
expect(klass.allow_chat_channel_mentions).to eq(true)
expect(klass.allow_chat_direct_messages).to eq(true)
end
it "defines singleton methods non persona classes" do
@ -98,7 +104,10 @@ RSpec.describe AiPersona do
allowed_group_ids: [],
default_llm: "anthropic:claude-2",
max_context_posts: 3,
mentionable: true,
allow_topic_mentions: true,
allow_personal_messages: true,
allow_chat_channel_mentions: true,
allow_chat_direct_messages: true,
user_id: 1,
)
@ -108,12 +117,15 @@ RSpec.describe AiPersona do
expect(klass.system).to eq(false)
expect(klass.allowed_group_ids).to eq([])
expect(klass.user_id).to eq(1)
expect(klass.mentionable).to eq(true)
expect(klass.default_llm).to eq("anthropic:claude-2")
expect(klass.max_context_posts).to eq(3)
expect(klass.allow_topic_mentions).to eq(true)
expect(klass.allow_personal_messages).to eq(true)
expect(klass.allow_chat_channel_mentions).to eq(true)
expect(klass.allow_chat_direct_messages).to eq(true)
end
it "does not allow setting allow_chat without a default_llm" do
it "does not allow setting allowing chat without a default_llm" do
persona =
AiPersona.create(
name: "test",
@ -121,7 +133,37 @@ RSpec.describe AiPersona do
system_prompt: "test",
allowed_group_ids: [],
default_llm: nil,
allow_chat: true,
allow_chat_channel_mentions: true,
)
expect(persona.valid?).to eq(false)
expect(persona.errors[:default_llm].first).to eq(
I18n.t("discourse_ai.ai_bot.personas.default_llm_required"),
)
persona =
AiPersona.create(
name: "test",
description: "test",
system_prompt: "test",
allowed_group_ids: [],
default_llm: nil,
allow_chat_direct_messages: true,
)
expect(persona.valid?).to eq(false)
expect(persona.errors[:default_llm].first).to eq(
I18n.t("discourse_ai.ai_bot.personas.default_llm_required"),
)
persona =
AiPersona.create(
name: "test",
description: "test",
system_prompt: "test",
allowed_group_ids: [],
default_llm: nil,
allow_topic_mentions: true,
)
expect(persona.valid?).to eq(false)

View File

@ -2,7 +2,10 @@
module DiscourseAi::ChatBotHelper
def toggle_enabled_bots(bots: [])
LlmModel.update_all(enabled_chat_bot: false)
models = LlmModel.all
models = models.where("id not in (?)", bots.map(&:id)) if bots.present?
models.update_all(enabled_chat_bot: false)
bots.each { |b| b.update!(enabled_chat_bot: true) }
DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
end

View File

@ -40,7 +40,10 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
:ai_persona,
name: "search2",
tools: [["SearchCommand", { base_query: "test" }, true]],
mentionable: true,
allow_topic_mentions: true,
allow_personal_messages: true,
allow_chat_channel_mentions: true,
allow_chat_direct_messages: true,
default_llm: "anthropic:claude-2",
forced_tool_count: 2,
)
@ -52,7 +55,11 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
serializer_persona1 = response.parsed_body["ai_personas"].find { |p| p["id"] == persona1.id }
serializer_persona2 = response.parsed_body["ai_personas"].find { |p| p["id"] == persona2.id }
expect(serializer_persona2["mentionable"]).to eq(true)
expect(serializer_persona2["allow_topic_mentions"]).to eq(true)
expect(serializer_persona2["allow_personal_messages"]).to eq(true)
expect(serializer_persona2["allow_chat_channel_mentions"]).to eq(true)
expect(serializer_persona2["allow_chat_direct_messages"]).to eq(true)
expect(serializer_persona2["default_llm"]).to eq("anthropic:claude-2")
expect(serializer_persona2["user_id"]).to eq(persona2.user_id)
expect(serializer_persona2["user"]["id"]).to eq(persona2.user_id)
@ -167,7 +174,10 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
tools: [["search", { "base_query" => "test" }, true]],
top_p: 0.1,
temperature: 0.5,
mentionable: true,
allow_topic_mentions: true,
allow_personal_messages: true,
allow_chat_channel_mentions: true,
allow_chat_direct_messages: true,
default_llm: "anthropic:claude-2",
forced_tool_count: 2,
}
@ -186,9 +196,12 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
expect(persona_json["name"]).to eq("superbot")
expect(persona_json["top_p"]).to eq(0.1)
expect(persona_json["temperature"]).to eq(0.5)
expect(persona_json["mentionable"]).to eq(true)
expect(persona_json["default_llm"]).to eq("anthropic:claude-2")
expect(persona_json["forced_tool_count"]).to eq(2)
expect(persona_json["allow_topic_mentions"]).to eq(true)
expect(persona_json["allow_personal_messages"]).to eq(true)
expect(persona_json["allow_chat_channel_mentions"]).to eq(true)
expect(persona_json["allow_chat_direct_messages"]).to eq(true)
persona = AiPersona.find(persona_json["id"])

View File

@ -63,8 +63,6 @@ describe DiscourseAi::Embeddings::EmbeddingsController do
context "when rate limiting is enabled" do
before { RateLimiter.enable }
use_redis_snapshotting
it "will rate limit correctly" do
stub_const(subject.class, :MAX_HYDE_SEARCHES_PER_MINUTE, 1) do
stub_const(subject.class, :MAX_SEARCHES_PER_MINUTE, 2) do

View File

@ -37,8 +37,8 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
description: "Description",
top_p: 0.8,
temperature: 0.7,
mentionable: false,
default_llm: "Default LLM",
force_default_llm: false,
user: null,
user_id: null,
max_context_posts: 5,
@ -52,6 +52,10 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
allow_chat: false,
tool_details: true,
forced_tool_count: -1,
allow_personal_messages: true,
allow_topic_mentions: true,
allow_chat_channel_mentions: true,
allow_chat_direct_messages: true,
};
const aiPersona = AiPersona.create({ ...properties });
@ -82,7 +86,6 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
user: null,
user_id: null,
default_llm: "Default LLM",
mentionable: false,
max_context_posts: 5,
vision_enabled: true,
vision_max_pixels: 100,
@ -94,6 +97,11 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
allow_chat: false,
tool_details: true,
forced_tool_count: -1,
allow_personal_messages: true,
allow_topic_mentions: true,
allow_chat_channel_mentions: true,
allow_chat_direct_messages: true,
force_default_llm: false,
};
const aiPersona = AiPersona.create({ ...properties });