FEATURE: smarter persona tethering (#832)

Splits persona permissions so you can allow a persona on any combination of:

- chat direct messages
- personal messages
- topic mentions
- chat channel mentions

Previously we did not have this flexibility.

Additionally, adds the ability to "tether" a language model to a persona so that model is always used when the persona replies. This allows a cheaper language model to be used for one group of people and a more expensive one for another.
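To make the new knobs concrete, here is a minimal sketch (not taken from this commit; the persona name, prompt, and group are made up) of a persona that is only reachable via topic mentions and is tethered to its default model:

    # Hypothetical persona: reachable only through @mentions in topics,
    # always answered by its default (cheaper) language model.
    persona =
      AiPersona.create!(
        name: "triage-bot",                     # made-up name
        description: "Routes incoming topics",
        system_prompt: "You triage topics.",
        allowed_group_ids: [Group::AUTO_GROUPS[:trust_level_0]],
        allow_topic_mentions: true,             # new per-modality flags
        allow_personal_messages: false,
        allow_chat_channel_mentions: false,
        allow_chat_direct_messages: false,
        default_llm: "anthropic:claude-2",      # required once any modality is enabled
        force_default_llm: true,                # the "tether": always use default_llm
      )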
Committed by Sam via GitHub on 2024-10-16 07:20:31 +11:00
parent c7acb4a6a0 · commit bdf3b6268b
20 changed files with 422 additions and 122 deletions

View File

@@ -96,7 +96,6 @@ module DiscourseAi
       :temperature,
       :default_llm,
       :user_id,
-      :mentionable,
       :max_context_posts,
       :vision_enabled,
       :vision_max_pixels,
@@ -104,9 +103,13 @@ module DiscourseAi
       :rag_chunk_overlap_tokens,
       :rag_conversation_chunks,
       :question_consolidator_llm,
-      :allow_chat,
+      :allow_chat_channel_mentions,
+      :allow_chat_direct_messages,
+      :allow_topic_mentions,
+      :allow_personal_messages,
       :tool_details,
       :forced_tool_count,
+      :force_default_llm,
       allowed_group_ids: [],
       rag_uploads: [:id],
     )

View File

@@ -1,8 +1,8 @@
 # frozen_string_literal: true

 class AiPersona < ActiveRecord::Base
-  # TODO remove this line 01-11-2024
-  self.ignored_columns = [:commands]
+  # TODO remove this line 01-1-2025
+  self.ignored_columns = %i[commands allow_chat mentionable]

   # places a hard limit, so per site we cache a maximum of 500 classes
   MAX_PERSONAS_PER_SITE = 500
@@ -52,30 +52,47 @@ class AiPersona < ActiveRecord::Base
       persona_cache[:persona_users] ||= AiPersona
         .where(enabled: true)
         .joins(:user)
-        .pluck(
-          "ai_personas.id, users.id, users.username_lower, allowed_group_ids, default_llm, mentionable, allow_chat",
-        )
-        .map do |id, user_id, username, allowed_group_ids, default_llm, mentionable, allow_chat|
-          {
-            id: id,
-            user_id: user_id,
-            username: username,
-            allowed_group_ids: allowed_group_ids,
-            default_llm: default_llm,
-            mentionable: mentionable,
-            allow_chat: allow_chat,
-          }
-        end
+        .map do |persona|
+          {
+            id: persona.id,
+            user_id: persona.user_id,
+            username: persona.user.username_lower,
+            allowed_group_ids: persona.allowed_group_ids,
+            default_llm: persona.default_llm,
+            force_default_llm: persona.force_default_llm,
+            allow_chat_channel_mentions: persona.allow_chat_channel_mentions,
+            allow_chat_direct_messages: persona.allow_chat_direct_messages,
+            allow_topic_mentions: persona.allow_topic_mentions,
+            allow_personal_messages: persona.allow_personal_messages,
+          }
+        end

     if user
-      persona_users.select { |mentionable| user.in_any_groups?(mentionable[:allowed_group_ids]) }
+      persona_users.select { |persona_user| user.in_any_groups?(persona_user[:allowed_group_ids]) }
     else
       persona_users
     end
   end

-  def self.allowed_chat(user: nil)
-    personas = persona_cache[:allowed_chat] ||= persona_users.select { |u| u[:allow_chat] }
+  def self.allowed_modalities(
+    user: nil,
+    allow_chat_channel_mentions: false,
+    allow_chat_direct_messages: false,
+    allow_topic_mentions: false,
+    allow_personal_messages: false
+  )
+    index =
+      "modality-#{allow_chat_channel_mentions}-#{allow_chat_direct_messages}-#{allow_topic_mentions}-#{allow_personal_messages}"
+
+    personas =
+      persona_cache[index.to_sym] ||= persona_users.select do |persona|
+        next true if allow_chat_channel_mentions && persona[:allow_chat_channel_mentions]
+        next true if allow_chat_direct_messages && persona[:allow_chat_direct_messages]
+        next true if allow_topic_mentions && persona[:allow_topic_mentions]
+        next true if allow_personal_messages && persona[:allow_personal_messages]
+        false
+      end
+
     if user
       personas.select { |u| user.in_any_groups?(u[:allowed_group_ids]) }
     else
@@ -83,18 +100,6 @@ class AiPersona < ActiveRecord::Base
     end
   end

-  def self.mentionables(user: nil)
-    all_mentionables =
-      persona_cache[:mentionables] ||= persona_users.select do |mentionable|
-        mentionable[:mentionable]
-      end
-
-    if user
-      all_mentionables.select { |mentionable| user.in_any_groups?(mentionable[:allowed_group_ids]) }
-    else
-      all_mentionables
-    end
-  end
-
   after_commit :bump_cache

   def bump_cache
@@ -113,7 +118,11 @@ class AiPersona < ActiveRecord::Base
       vision_max_pixels
       rag_conversation_chunks
       question_consolidator_llm
-      allow_chat
+      allow_chat_channel_mentions
+      allow_chat_direct_messages
+      allow_topic_mentions
+      allow_personal_messages
+      force_default_llm
       name
       description
       allowed_group_ids
@@ -128,6 +137,8 @@ class AiPersona < ActiveRecord::Base
       instance_attributes[attr] = value
     end

+    instance_attributes[:username] = user&.username_lower
+
     if persona_class
       instance_attributes.each do |key, value|
         # description/name are localized
@@ -243,7 +254,10 @@ class AiPersona < ActiveRecord::Base
   private

   def chat_preconditions
-    if allow_chat && !default_llm
+    if (
+         allow_chat_channel_mentions || allow_chat_direct_messages || allow_topic_mentions ||
+           force_default_llm
+       ) && !default_llm
       errors.add(:default_llm, I18n.t("discourse_ai.ai_bot.personas.default_llm_required"))
     end
   end
@@ -281,7 +295,6 @@ end
 #  temperature                 :float
 #  top_p                       :float
 #  user_id                     :integer
-#  mentionable                 :boolean          default(FALSE), not null
 #  default_llm                 :text
 #  max_context_posts           :integer
 #  max_post_context_tokens     :integer
@@ -291,16 +304,15 @@ end
 #  rag_chunk_tokens            :integer          default(374), not null
 #  rag_chunk_overlap_tokens    :integer          default(10), not null
 #  rag_conversation_chunks     :integer          default(10), not null
-#  role                        :enum             default("bot"), not null
-#  role_category_ids           :integer          default([]), not null, is an Array
-#  role_tags                   :string           default([]), not null, is an Array
-#  role_group_ids              :integer          default([]), not null, is an Array
-#  role_whispers               :boolean          default(FALSE), not null
-#  role_max_responses_per_hour :integer          default(50), not null
 #  question_consolidator_llm   :text
-#  allow_chat                  :boolean          default(FALSE), not null
 #  tool_details                :boolean          default(TRUE), not null
 #  tools                       :json             not null
+#  forced_tool_count           :integer          default(-1), not null
+#  allow_chat_channel_mentions :boolean          default(FALSE), not null
+#  allow_chat_direct_messages  :boolean          default(FALSE), not null
+#  allow_topic_mentions        :boolean          default(FALSE), not null
+#  allow_personal_messages     :boolean          default(TRUE), not null
+#  force_default_llm           :boolean          default(FALSE), not null
 #
 # Indexes
 #
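For reference, a hypothetical caller of the new AiPersona.allowed_modalities method defined above (current_user is assumed to be a User record; the flags default to false, so only the requested modalities match):

    # Find enabled personas the current user may mention in topics or PM directly.
    personas =
      AiPersona.allowed_modalities(
        user: current_user,
        allow_topic_mentions: true,
        allow_personal_messages: true,
      )

    # Each entry is the cached hash built in persona_users above.
    personas.each { |p| puts "#{p[:username]} (force_default_llm: #{p[:force_default_llm]})" }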

View File

@@ -14,7 +14,6 @@ class LocalizedAiPersonaSerializer < ApplicationSerializer
     :allowed_group_ids,
     :temperature,
     :top_p,
-    :mentionable,
     :default_llm,
     :user_id,
     :max_context_posts,
@@ -24,9 +23,13 @@ class LocalizedAiPersonaSerializer < ApplicationSerializer
     :rag_chunk_overlap_tokens,
     :rag_conversation_chunks,
     :question_consolidator_llm,
-    :allow_chat,
     :tool_details,
-    :forced_tool_count
+    :forced_tool_count,
+    :allow_chat_channel_mentions,
+    :allow_chat_direct_messages,
+    :allow_topic_mentions,
+    :allow_personal_messages,
+    :force_default_llm

   has_one :user, serializer: BasicUserSerializer, embed: :object
   has_many :rag_uploads, serializer: UploadSerializer, embed: :object

View File

@@ -15,8 +15,8 @@ const CREATE_ATTRIBUTES = [
   "top_p",
   "temperature",
   "user_id",
-  "mentionable",
   "default_llm",
+  "force_default_llm",
   "user",
   "max_context_posts",
   "vision_enabled",
@@ -29,6 +29,10 @@ const CREATE_ATTRIBUTES = [
   "allow_chat",
   "tool_details",
   "forced_tool_count",
+  "allow_personal_messages",
+  "allow_topic_mentions",
+  "allow_chat_channel_mentions",
+  "allow_chat_direct_messages",
 ];

 const SYSTEM_ATTRIBUTES = [
@@ -38,8 +42,8 @@ const SYSTEM_ATTRIBUTES = [
   "system",
   "priority",
   "user_id",
-  "mentionable",
   "default_llm",
+  "force_default_llm",
   "user",
   "max_context_posts",
   "vision_enabled",
@@ -49,8 +53,11 @@ const SYSTEM_ATTRIBUTES = [
   "rag_chunk_overlap_tokens",
   "rag_conversation_chunks",
   "question_consolidator_llm",
-  "allow_chat",
   "tool_details",
+  "allow_personal_messages",
+  "allow_topic_mentions",
+  "allow_chat_channel_mentions",
+  "allow_chat_direct_messages",
 ];

 class ToolOption {

View File

@@ -44,6 +44,7 @@ export default class PersonaEditor extends Component {
   @tracked selectedTools = [];
   @tracked selectedToolNames = [];
   @tracked forcedToolNames = [];
+  @tracked hasDefaultLlm = false;

   get chatPluginEnabled() {
     return this.siteSettings.chat_enabled;
@@ -81,6 +82,7 @@ export default class PersonaEditor extends Component {
   @action
   updateModel() {
     this.editingModel = this.args.model.workingCopy();
+    this.hasDefaultLlm = !!this.editingModel.default_llm;
     this.showDelete = !this.args.model.isNew && !this.args.model.system;
     this.maxPixelsValue = this.findClosestPixelValue(
       this.editingModel.vision_max_pixels
@@ -183,8 +185,10 @@
   set mappedDefaultLlm(value) {
     if (value === "blank") {
       this.editingModel.default_llm = null;
+      this.hasDefaultLlm = false;
     } else {
       this.editingModel.default_llm = value;
+      this.hasDefaultLlm = true;
     }
   }
@@ -344,6 +348,16 @@
             @content={{I18n.t "discourse_ai.ai_persona.default_llm_help"}}
           />
         </div>
+        {{#if this.hasDefaultLlm}}
+          <div class="control-group">
+            <label>
+              <Input
+                @type="checkbox"
+                @checked={{this.editingModel.force_default_llm}}
+              />
+              {{I18n.t "discourse_ai.ai_persona.force_default_llm"}}</label>
+          </div>
+        {{/if}}
         {{#unless @model.isNew}}
           <div class="control-group">
             <label>{{I18n.t "discourse_ai.ai_persona.user"}}</label>
@@ -429,33 +443,73 @@ export default class PersonaEditor extends Component {
             disabled={{this.editingModel.system}}
           />
         </div>
-        {{#if this.editingModel.user}}
-          {{#if this.chatPluginEnabled}}
-            <div class="control-group ai-persona-editor__allow_chat">
-              <label>
-                <Input
-                  @type="checkbox"
-                  @checked={{this.editingModel.allow_chat}}
-                />
-                {{I18n.t "discourse_ai.ai_persona.allow_chat"}}</label>
-              <DTooltip
-                @icon="question-circle"
-                @content={{I18n.t "discourse_ai.ai_persona.allow_chat_help"}}
-              />
-            </div>
-          {{/if}}
-          <div class="control-group ai-persona-editor__mentionable">
-            <label>
-              <Input
-                @type="checkbox"
-                @checked={{this.editingModel.mentionable}}
-              />
-              {{I18n.t "discourse_ai.ai_persona.mentionable"}}</label>
-            <DTooltip
-              @icon="question-circle"
-              @content={{I18n.t "discourse_ai.ai_persona.mentionable_help"}}
-            />
-          </div>
-        {{/if}}
+        <div class="control-group ai-persona-editor__allow_personal_messages">
+          <label>
+            <Input
+              @type="checkbox"
+              @checked={{this.editingModel.allow_personal_messages}}
+            />
+            {{I18n.t "discourse_ai.ai_persona.allow_personal_messages"}}</label>
+          <DTooltip
+            @icon="question-circle"
+            @content={{I18n.t
+              "discourse_ai.ai_persona.allow_personal_messages_help"
+            }}
+          />
+        </div>
+        {{#if this.editingModel.user}}
+          <div class="control-group ai-persona-editor__allow_topic_mentions">
+            <label>
+              <Input
+                @type="checkbox"
+                @checked={{this.editingModel.allow_topic_mentions}}
+              />
+              {{I18n.t "discourse_ai.ai_persona.allow_topic_mentions"}}</label>
+            <DTooltip
+              @icon="question-circle"
+              @content={{I18n.t
+                "discourse_ai.ai_persona.allow_topic_mentions_help"
+              }}
+            />
+          </div>
+          {{#if this.chatPluginEnabled}}
+            <div
+              class="control-group ai-persona-editor__allow_chat_direct_messages"
+            >
+              <label>
+                <Input
+                  @type="checkbox"
+                  @checked={{this.editingModel.allow_chat_direct_messages}}
+                />
+                {{I18n.t
+                  "discourse_ai.ai_persona.allow_chat_direct_messages"
+                }}</label>
+              <DTooltip
+                @icon="question-circle"
+                @content={{I18n.t
                  "discourse_ai.ai_persona.allow_chat_direct_messages_help"
+                }}
+              />
+            </div>
+            <div
+              class="control-group ai-persona-editor__allow_chat_channel_mentions"
+            >
+              <label>
+                <Input
+                  @type="checkbox"
+                  @checked={{this.editingModel.allow_chat_channel_mentions}}
+                />
+                {{I18n.t
+                  "discourse_ai.ai_persona.allow_chat_channel_mentions"
+                }}</label>
+              <DTooltip
+                @icon="question-circle"
+                @content={{I18n.t
                  "discourse_ai.ai_persona.allow_chat_channel_mentions_help"
+                }}
+              />
+            </div>
+          {{/if}}
+        {{/if}}
         <div class="control-group ai-persona-editor__tool-details">
           <label>

View File

@@ -32,6 +32,7 @@ export default class BotSelector extends Component {
   @service currentUser;
   @service siteSettings;
   @tracked llm;
+  @tracked allowLLMSelector = true;

   STORE_NAMESPACE = "discourse_ai_persona_selector_";
   LLM_STORE_NAMESPACE = "discourse_ai_llm_selector_";
@@ -54,6 +55,7 @@ export default class BotSelector extends Component {
     }

     this.composer.metaData = { ai_persona_id: this._value };
+    this.setAllowLLMSelector();

     let llm = this.preferredLlmStore.getObject("id");
     llm = llm || this.llmOptions[0].id;
@@ -93,6 +95,15 @@
     this._value = newValue;
     this.preferredPersonaStore.setObject({ key: "id", value: newValue });
     this.composer.metaData = { ai_persona_id: newValue };
+    this.setAllowLLMSelector();
+  }
+
+  setAllowLLMSelector() {
+    const persona = this.currentUser.ai_enabled_personas.find(
+      (innerPersona) => innerPersona.id === this._value
+    );
+
+    this.allowLLMSelector = !persona?.force_default_llm;
   }

   get currentLlm() {
@@ -105,7 +116,14 @@
       (bot) => bot.model_name === this.llm
     ).username;
     this.preferredLlmStore.setObject({ key: "id", value: newValue });
+    if (this.allowLLMSelector) {
       this.composer.set("targetRecipients", botUsername);
+    } else {
+      const persona = this.currentUser.ai_enabled_personas.find(
+        (innerPersona) => innerPersona.id === this._value
+      );
+      this.composer.set("targetRecipients", persona.username || "");
+    }
   }

   get llmOptions() {
@@ -131,6 +149,7 @@
           @options={{hash icon="robot" filterable=this.filterable}}
         />
       </div>
+      {{#if this.allowLLMSelector}}
         <div class="llm-selector">
           <DropdownSelectBox
             class="persona-llm-selector__llm-dropdown"
@@ -139,6 +158,7 @@
             @options={{hash icon="globe"}}
           />
         </div>
+      {{/if}}
     </div>
   </template>
 }

View File

@@ -68,9 +68,12 @@
   &__tool-details,
   &__vision_enabled,
-  &__allow_chat,
-  &__priority,
-  &__mentionable {
+  &__allow_chat_direct_messages,
+  &__allow_chat_channel_mentions,
+  &__allow_topic_mentions,
+  &__allow_personal_messages,
+  &__force_default_llm,
+  &__priority {
     display: flex;
     align-items: center;
   }

View File

@@ -148,8 +148,15 @@ en:
       question_consolidator_llm_help: The language model to use for the question consolidator, you may choose a less powerful model to save costs.
       system_prompt: System Prompt
       forced_tool_strategy: Forced Tool Strategy
-      allow_chat: "Allow Chat"
-      allow_chat_help: "If enabled, users in allowed groups can DM this persona"
+      allow_chat_direct_messages: "Allow Chat Direct Messages"
+      allow_chat_direct_messages_help: "If enabled, users in allowed groups can send direct messages to this persona."
+      allow_chat_channel_mentions: "Allow Chat Channel Mentions"
+      allow_chat_channel_mentions_help: "If enabled, users in allowed groups can mention this persona in chat channels."
+      allow_personal_messages: "Allow Personal Messages"
+      allow_personal_messages_help: "If enabled, users in allowed groups can send personal messages to this persona."
+      allow_topic_mentions: "Allow Topic Mentions"
+      allow_topic_mentions_help: "If enabled, users in allowed groups can mention this persona in topics."
+      force_default_llm: "Always use default Language Model"
       save: Save
       saved: AI Persona Saved
       enabled: "Enabled?"

View File

@@ -0,0 +1,23 @@
# frozen_string_literal: true

class AiPersonaChatTopicRefactor < ActiveRecord::Migration[7.1]
  def change
    add_column :ai_personas, :allow_chat_channel_mentions, :boolean, default: false, null: false
    add_column :ai_personas, :allow_chat_direct_messages, :boolean, default: false, null: false
    add_column :ai_personas, :allow_topic_mentions, :boolean, default: false, null: false
    add_column :ai_personas, :allow_personal_messages, :boolean, default: true, null: false
    add_column :ai_personas, :force_default_llm, :boolean, default: false, null: false

    execute <<~SQL
      UPDATE ai_personas
      SET allow_chat_channel_mentions = mentionable, allow_chat_direct_messages = true
      WHERE allow_chat = true
    SQL

    execute <<~SQL
      UPDATE ai_personas
      SET allow_topic_mentions = true
      WHERE mentionable = true
    SQL
  end
end
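How the legacy flags map onto the new columns can be expressed as the following illustrative helper (not part of the commit; the real backfill is the SQL above, since allow_chat and mentionable are now ignored columns on the model):

    # Illustrative only: given a persona's old flags, return the new flag values
    # the migration would produce.
    def migrated_flags(allow_chat:, mentionable:)
      {
        allow_chat_direct_messages: allow_chat,
        allow_chat_channel_mentions: allow_chat && mentionable,
        allow_topic_mentions: mentionable,
        allow_personal_messages: true, # new column defaults to true for everyone
        force_default_llm: false,      # new column defaults to false
      }
    end

    migrated_flags(allow_chat: true, mentionable: true)
    # => every chat and topic modality enabled, matching the old allow_chat + mentionable persona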

View File

@@ -0,0 +1,7 @@
# frozen_string_literal: true
class AiPersonaPostMigrateDropCols < ActiveRecord::Migration[7.1]
  def change
    remove_columns :ai_personas, :allow_chat
    remove_columns :ai_personas, :mentionable
  end
end

View File

@@ -8,11 +8,10 @@ module DiscourseAi
     Bot = Struct.new(:id, :name, :llm)

     def self.all_bot_ids
-      mentionable_persona_user_ids =
-        AiPersona.mentionables.map { |mentionable| mentionable[:user_id] }
-      mentionable_bot_users = LlmModel.joins(:user).pluck("users.id")
-      mentionable_bot_users + mentionable_persona_user_ids
+      AiPersona
+        .persona_users
+        .map { |persona| persona[:user_id] }
+        .concat(LlmModel.where(enabled_chat_bot: true).pluck(:user_id))
     end

     def self.find_participant_in(participant_ids)
@@ -109,7 +108,13 @@
           DiscourseAi::AiBot::Personas::Persona
             .all(user: scope.user)
             .map do |persona|
-              { id: persona.id, name: persona.name, description: persona.description }
+              {
+                id: persona.id,
+                name: persona.name,
+                description: persona.description,
+                force_default_llm: persona.force_default_llm,
+                username: persona.username,
+              }
             end
         end
@@ -140,7 +145,7 @@
             {
               "id" => persona_user[:user_id],
               "username" => persona_user[:username],
-              "mentionable" => persona_user[:mentionable],
+              "force_default_llm" => persona_user[:force_default_llm],
               "is_persona" => true,
             }
           end,

View File

@@ -21,7 +21,15 @@ module DiscourseAi
         nil
       end

-      def allow_chat
+      def force_default_llm
+        false
+      end
+
+      def allow_chat_channel_mentions
+        false
+      end
+
+      def allow_chat_direct_messages
         false
       end

View File

@@ -11,7 +11,9 @@ module DiscourseAi
       def self.find_chat_persona(message, channel, user)
         if channel.direct_message_channel?
-          AiPersona.allowed_chat.find do |p|
+          AiPersona
+            .allowed_modalities(allow_chat_direct_messages: true)
+            .find do |p|
             p[:user_id].in?(channel.allowed_user_ids) && (user.group_ids & p[:allowed_group_ids])
           end
         else
@@ -19,9 +21,9 @@ module DiscourseAi
           if message.message.include?("@")
             mentions = message.parsed_mentions.parsed_direct_mentions
             if mentions.present?
-              AiPersona.allowed_chat.find do |p|
-                p[:username].in?(mentions) && (user.group_ids & p[:allowed_group_ids])
-              end
+              AiPersona
+                .allowed_modalities(allow_chat_channel_mentions: true)
+                .find { |p| p[:username].in?(mentions) && (user.group_ids & p[:allowed_group_ids]) }
             end
           end
         end
@@ -29,8 +31,14 @@ module DiscourseAi
       def self.schedule_chat_reply(message, channel, user, context)
         return if !SiteSetting.ai_bot_enabled

-        return if AiPersona.allowed_chat.blank?
-        return if AiPersona.allowed_chat.any? { |m| m[:user_id] == user.id }
+        all_chat =
+          AiPersona.allowed_modalities(
+            allow_chat_channel_mentions: true,
+            allow_chat_direct_messages: true,
+          )
+
+        return if all_chat.blank?
+        return if all_chat.any? { |m| m[:user_id] == user.id }

         persona = find_chat_persona(message, channel, user)
         return if !persona
@@ -56,15 +64,23 @@ module DiscourseAi
       def self.schedule_reply(post)
         return if is_bot_user_id?(post.user_id)

-        bot_ids = LlmModel.joins(:user).pluck("users.id")
-        mentionables = AiPersona.mentionables(user: post.user)
+        mentionables = nil
+
+        if post.topic.private_message?
+          mentionables =
+            AiPersona.allowed_modalities(user: post.user, allow_personal_messages: true)
+        else
+          mentionables = AiPersona.allowed_modalities(user: post.user, allow_topic_mentions: true)
+        end

         bot_user = nil
         mentioned = nil

+        all_llm_user_ids = LlmModel.joins(:user).pluck("users.id")
+
         if post.topic.private_message?
-          bot_user = post.topic.topic_allowed_users.where(user_id: bot_ids).first&.user
+          # this is an edge case, you started a PM with a different bot
+          bot_user = post.topic.topic_allowed_users.where(user_id: all_llm_user_ids).first&.user
           bot_user ||=
             post
               .topic
@@ -114,6 +130,8 @@ module DiscourseAi
         persona ||= DiscourseAi::AiBot::Personas::General

+        bot_user = User.find(persona.user_id) if persona && persona.force_default_llm
+
         bot = DiscourseAi::AiBot::Bot.as(bot_user, persona: persona.new)
         new(bot).update_playground_with(post)
       end

View File

@@ -35,15 +35,16 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
       expect(serializer[:current_user][:can_debug_ai_bot_conversations]).to eq(true)
     end

-    it "adds mentionables to current_user_serializer" do
+    it "adds information about forcing default llm to current_user_serializer" do
       Group.refresh_automatic_groups!

       persona =
         Fabricate(
           :ai_persona,
-          mentionable: true,
           enabled: true,
           allowed_group_ids: [bot_allowed_group.id],
+          default_llm: "claude-2",
+          force_default_llm: true,
         )

       persona.create_user!
@@ -54,7 +55,7 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
       persona_bot = bots.find { |bot| bot["id"] == persona.user_id }

       expect(persona_bot["username"]).to eq(persona.user.username)
-      expect(persona_bot["mentionable"]).to eq(true)
+      expect(persona_bot["force_default_llm"]).to eq(true)
     end

     it "includes user ids for all personas in the serializer" do
@@ -69,7 +70,7 @@ RSpec.describe DiscourseAi::AiBot::EntryPoint do
       persona_bot = bots.find { |bot| bot["id"] == persona.user_id }

       expect(persona_bot["username"]).to eq(persona.user.username)
-      expect(persona_bot["mentionable"]).to eq(false)
+      expect(persona_bot["force_default_llm"]).to eq(false)
     end

     it "queues a job to generate a reply by the AI" do

View File

@@ -55,6 +55,11 @@ RSpec.describe DiscourseAi::AiBot::Playground do
     )
   end

+  after do
+    # we must reset cache on persona cause data can be rolled back
+    AiPersona.persona_cache.flush!
+  end
+
   describe "is_bot_user_id?" do
     it "properly detects ALL bots as bot users" do
       persona = Fabricate(:ai_persona, enabled: false)
@@ -227,7 +232,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
        vision_enabled: true,
        vision_max_pixels: 1_000,
        default_llm: "custom:#{opus_model.id}",
-        mentionable: true,
+        allow_topic_mentions: true,
      )
    end
@@ -277,7 +282,11 @@ RSpec.describe DiscourseAi::AiBot::Playground do
      )

      persona.create_user!
-      persona.update!(default_llm: "custom:#{claude_2.id}", mentionable: true)
+      persona.update!(
+        default_llm: "custom:#{claude_2.id}",
+        allow_chat_channel_mentions: true,
+        allow_topic_mentions: true,
+      )

      persona
    end
@@ -294,7 +303,7 @@ RSpec.describe DiscourseAi::AiBot::Playground do
      SiteSetting.ai_bot_enabled = true
      SiteSetting.chat_allowed_groups = "#{Group::AUTO_GROUPS[:trust_level_0]}"
      Group.refresh_automatic_groups!
-      persona.update!(allow_chat: true, mentionable: true, default_llm: "custom:#{opus_model.id}")
+      persona.update!(allow_chat_channel_mentions: true, default_llm: "custom:#{opus_model.id}")
    end

    it "should behave in a sane way when threading is enabled" do
@@ -406,8 +415,9 @@ RSpec.describe DiscourseAi::AiBot::Playground do
      SiteSetting.chat_allowed_groups = "#{Group::AUTO_GROUPS[:trust_level_0]}"
      Group.refresh_automatic_groups!
      persona.update!(
-        allow_chat: true,
-        mentionable: false,
+        allow_chat_direct_messages: true,
+        allow_topic_mentions: false,
+        allow_chat_channel_mentions: false,
        default_llm: "custom:#{opus_model.id}",
      )
      SiteSetting.ai_bot_enabled = true
@@ -481,7 +491,6 @@ RSpec.describe DiscourseAi::AiBot::Playground do
      # it also needs to include history per config - first feed some history
      persona.update!(enabled: false)
      persona_guardian = Guardian.new(persona.user)
-
      4.times do |i|
@@ -561,6 +570,8 @@ RSpec.describe DiscourseAi::AiBot::Playground do
      # we still should be able to mention with no bots
      toggle_enabled_bots(bots: [])

+      persona.update!(allow_topic_mentions: true)
+
      post = nil
      DiscourseAi::Completions::Llm.with_prepared_responses(["Yes I can"]) do
        post =
@@ -574,6 +585,16 @@ RSpec.describe DiscourseAi::AiBot::Playground do
      last_post = post.topic.posts.order(:post_number).last
      expect(last_post.raw).to eq("Yes I can")
      expect(last_post.user_id).to eq(persona.user_id)
+
+      persona.update!(allow_topic_mentions: false)
+
+      post =
+        create_post(
+          title: "My public topic ABC",
+          raw: "Hey @#{persona.user.username}, can you help me?",
+        )
+
+      expect(post.topic.posts.last.post_number).to eq(1)
    end

    it "allows PMing a persona even when no particular bots are enabled" do
@@ -603,6 +624,50 @@ RSpec.describe DiscourseAi::AiBot::Playground do
      expect(last_post.topic.allowed_users.pluck(:user_id)).to include(persona.user_id)
      expect(last_post.topic.participant_count).to eq(2)
+
+      # ensure it can be disabled
+      persona.update!(allow_personal_messages: false)
+
+      post =
+        create_post(
+          raw: "Hey there #{persona.user.username}, can you help me please",
+          topic_id: post.topic.id,
+          user: admin,
+        )
+
+      expect(post.post_number).to eq(3)
+    end
+
+    it "can tether a persona unconditionally to an llm" do
+      gpt_35_turbo = Fabricate(:llm_model, name: "gpt-3.5-turbo")
+
+      # If you start a PM with GPT 3.5 bot, replies should come from it, not from Claude
+      SiteSetting.ai_bot_enabled = true
+      toggle_enabled_bots(bots: [gpt_35_turbo, claude_2])
+
+      post = nil
+      persona.update!(force_default_llm: true, default_llm: "custom:#{gpt_35_turbo.id}")
+
+      DiscourseAi::Completions::Llm.with_prepared_responses(
+        ["Yes I can", "Magic Title"],
+        llm: "custom:#{gpt_35_turbo.id}",
+      ) do
+        post =
+          create_post(
+            title: "I just made a PM",
+            raw: "hello world",
+            target_usernames: "#{user.username},#{claude_2.user.username}",
+            archetype: Archetype.private_message,
+            user: admin,
+            custom_fields: {
+              "ai_persona_id" => persona.id,
+            },
+          )
+      end
+
+      last_post = post.topic.posts.order(:post_number).last
+      expect(last_post.raw).to eq("Yes I can")
+      expect(last_post.user_id).to eq(persona.user_id)
    end

    it "picks the correct llm for persona in PMs" do

View File

@@ -71,9 +71,12 @@ RSpec.describe AiPersona do
    forum_helper = AiPersona.find_by(name: "Forum Helper")
    forum_helper.update!(
      user_id: 1,
-      mentionable: true,
      default_llm: "anthropic:claude-2",
      max_context_posts: 3,
+      allow_topic_mentions: true,
+      allow_personal_messages: true,
+      allow_chat_channel_mentions: true,
+      allow_chat_direct_messages: true,
    )

    klass = forum_helper.class_instance
@@ -83,9 +86,12 @@ RSpec.describe AiPersona do
    # tl 0 by default
    expect(klass.allowed_group_ids).to eq([10])
    expect(klass.user_id).to eq(1)
-    expect(klass.mentionable).to eq(true)
    expect(klass.default_llm).to eq("anthropic:claude-2")
    expect(klass.max_context_posts).to eq(3)
+    expect(klass.allow_topic_mentions).to eq(true)
+    expect(klass.allow_personal_messages).to eq(true)
+    expect(klass.allow_chat_channel_mentions).to eq(true)
+    expect(klass.allow_chat_direct_messages).to eq(true)
  end

  it "defines singleton methods non persona classes" do
@@ -98,7 +104,10 @@ RSpec.describe AiPersona do
        allowed_group_ids: [],
        default_llm: "anthropic:claude-2",
        max_context_posts: 3,
-        mentionable: true,
+        allow_topic_mentions: true,
+        allow_personal_messages: true,
+        allow_chat_channel_mentions: true,
+        allow_chat_direct_messages: true,
        user_id: 1,
      )
@@ -108,12 +117,15 @@ RSpec.describe AiPersona do
    expect(klass.system).to eq(false)
    expect(klass.allowed_group_ids).to eq([])
    expect(klass.user_id).to eq(1)
-    expect(klass.mentionable).to eq(true)
    expect(klass.default_llm).to eq("anthropic:claude-2")
    expect(klass.max_context_posts).to eq(3)
+    expect(klass.allow_topic_mentions).to eq(true)
+    expect(klass.allow_personal_messages).to eq(true)
+    expect(klass.allow_chat_channel_mentions).to eq(true)
+    expect(klass.allow_chat_direct_messages).to eq(true)
  end

-  it "does not allow setting allow_chat without a default_llm" do
+  it "does not allow setting allowing chat without a default_llm" do
    persona =
      AiPersona.create(
        name: "test",
@@ -121,7 +133,37 @@ RSpec.describe AiPersona do
        system_prompt: "test",
        allowed_group_ids: [],
        default_llm: nil,
-        allow_chat: true,
+        allow_chat_channel_mentions: true,
+      )
+
+    expect(persona.valid?).to eq(false)
+    expect(persona.errors[:default_llm].first).to eq(
+      I18n.t("discourse_ai.ai_bot.personas.default_llm_required"),
+    )
+
+    persona =
+      AiPersona.create(
+        name: "test",
+        description: "test",
+        system_prompt: "test",
+        allowed_group_ids: [],
+        default_llm: nil,
+        allow_chat_direct_messages: true,
+      )
+
+    expect(persona.valid?).to eq(false)
+    expect(persona.errors[:default_llm].first).to eq(
+      I18n.t("discourse_ai.ai_bot.personas.default_llm_required"),
+    )
+
+    persona =
+      AiPersona.create(
+        name: "test",
+        description: "test",
+        system_prompt: "test",
+        allowed_group_ids: [],
+        default_llm: nil,
+        allow_topic_mentions: true,
      )

    expect(persona.valid?).to eq(false)

View File

@@ -2,7 +2,10 @@
 module DiscourseAi::ChatBotHelper
   def toggle_enabled_bots(bots: [])
-    LlmModel.update_all(enabled_chat_bot: false)
+    models = LlmModel.all
+    models = models.where("id not in (?)", bots.map(&:id)) if bots.present?
+    models.update_all(enabled_chat_bot: false)
+
     bots.each { |b| b.update!(enabled_chat_bot: true) }
     DiscourseAi::AiBot::SiteSettingsExtension.enable_or_disable_ai_bots
   end

View File

@@ -40,7 +40,10 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
      :ai_persona,
      name: "search2",
      tools: [["SearchCommand", { base_query: "test" }, true]],
-      mentionable: true,
+      allow_topic_mentions: true,
+      allow_personal_messages: true,
+      allow_chat_channel_mentions: true,
+      allow_chat_direct_messages: true,
      default_llm: "anthropic:claude-2",
      forced_tool_count: 2,
    )
@@ -52,7 +55,11 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
      serializer_persona1 = response.parsed_body["ai_personas"].find { |p| p["id"] == persona1.id }
      serializer_persona2 = response.parsed_body["ai_personas"].find { |p| p["id"] == persona2.id }

-      expect(serializer_persona2["mentionable"]).to eq(true)
+      expect(serializer_persona2["allow_topic_mentions"]).to eq(true)
+      expect(serializer_persona2["allow_personal_messages"]).to eq(true)
+      expect(serializer_persona2["allow_chat_channel_mentions"]).to eq(true)
+      expect(serializer_persona2["allow_chat_direct_messages"]).to eq(true)
      expect(serializer_persona2["default_llm"]).to eq("anthropic:claude-2")
      expect(serializer_persona2["user_id"]).to eq(persona2.user_id)
      expect(serializer_persona2["user"]["id"]).to eq(persona2.user_id)
@@ -167,7 +174,10 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
        tools: [["search", { "base_query" => "test" }, true]],
        top_p: 0.1,
        temperature: 0.5,
-        mentionable: true,
+        allow_topic_mentions: true,
+        allow_personal_messages: true,
+        allow_chat_channel_mentions: true,
+        allow_chat_direct_messages: true,
        default_llm: "anthropic:claude-2",
        forced_tool_count: 2,
      }
@@ -186,9 +196,12 @@ RSpec.describe DiscourseAi::Admin::AiPersonasController do
      expect(persona_json["name"]).to eq("superbot")
      expect(persona_json["top_p"]).to eq(0.1)
      expect(persona_json["temperature"]).to eq(0.5)
-      expect(persona_json["mentionable"]).to eq(true)
      expect(persona_json["default_llm"]).to eq("anthropic:claude-2")
      expect(persona_json["forced_tool_count"]).to eq(2)
+      expect(persona_json["allow_topic_mentions"]).to eq(true)
+      expect(persona_json["allow_personal_messages"]).to eq(true)
+      expect(persona_json["allow_chat_channel_mentions"]).to eq(true)
+      expect(persona_json["allow_chat_direct_messages"]).to eq(true)

      persona = AiPersona.find(persona_json["id"])

View File

@@ -63,8 +63,6 @@ describe DiscourseAi::Embeddings::EmbeddingsController do
  context "when rate limiting is enabled" do
    before { RateLimiter.enable }

-    use_redis_snapshotting
-
    it "will rate limit correctly" do
      stub_const(subject.class, :MAX_HYDE_SEARCHES_PER_MINUTE, 1) do
        stub_const(subject.class, :MAX_SEARCHES_PER_MINUTE, 2) do

View File

@@ -37,8 +37,8 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
      description: "Description",
      top_p: 0.8,
      temperature: 0.7,
-      mentionable: false,
      default_llm: "Default LLM",
+      force_default_llm: false,
      user: null,
      user_id: null,
      max_context_posts: 5,
@@ -52,6 +52,10 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
      allow_chat: false,
      tool_details: true,
      forced_tool_count: -1,
+      allow_personal_messages: true,
+      allow_topic_mentions: true,
+      allow_chat_channel_mentions: true,
+      allow_chat_direct_messages: true,
    };

    const aiPersona = AiPersona.create({ ...properties });
@@ -82,7 +86,6 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
      user: null,
      user_id: null,
      default_llm: "Default LLM",
-      mentionable: false,
      max_context_posts: 5,
      vision_enabled: true,
      vision_max_pixels: 100,
@@ -94,6 +97,11 @@ module("Discourse AI | Unit | Model | ai-persona", function () {
      allow_chat: false,
      tool_details: true,
      forced_tool_count: -1,
+      allow_personal_messages: true,
+      allow_topic_mentions: true,
+      allow_chat_channel_mentions: true,
+      allow_chat_direct_messages: true,
+      force_default_llm: false,
    };

    const aiPersona = AiPersona.create({ ...properties });